//===---- CGBuiltin.cpp - Emit LLVM Code for builtins ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Builtin calls as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CGOpenCLRuntime.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
#include "PatternInit.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/AST/OSLog.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/Support/ConvertUTF.h"
#include "llvm/Support/ScopedPrinter.h"
#include "llvm/Support/TargetParser.h"
#include <sstream>

using namespace clang;
using namespace CodeGen;
using namespace llvm;

static
int64_t clamp(int64_t Value, int64_t Low, int64_t High) {
  return std::min(High, std::max(Low, Value));
}

static void initializeAlloca(CodeGenFunction &CGF, AllocaInst *AI, Value *Size,
                             unsigned AlignmentInBytes) {
  ConstantInt *Byte;
  switch (CGF.getLangOpts().getTrivialAutoVarInit()) {
  case LangOptions::TrivialAutoVarInitKind::Uninitialized:
    // Nothing to initialize.
    return;
  case LangOptions::TrivialAutoVarInitKind::Zero:
    Byte = CGF.Builder.getInt8(0x00);
    break;
  case LangOptions::TrivialAutoVarInitKind::Pattern: {
    llvm::Type *Int8 = llvm::IntegerType::getInt8Ty(CGF.CGM.getLLVMContext());
    Byte = llvm::dyn_cast<llvm::ConstantInt>(
        initializationPatternFor(CGF.CGM, Int8));
    break;
  }
  }
  CGF.Builder.CreateMemSet(AI, Byte, Size, AlignmentInBytes);
}

/// getBuiltinLibFunction - Given a builtin id for a function like
/// "__builtin_fabsf", return a Function* for "fabsf".
llvm::Constant *CodeGenModule::getBuiltinLibFunction(const FunctionDecl *FD,
                                                     unsigned BuiltinID) {
  assert(Context.BuiltinInfo.isLibFunction(BuiltinID));

  // Get the name, skip over the __builtin_ prefix (if necessary).
  StringRef Name;
  GlobalDecl D(FD);

  // If the builtin has been declared explicitly with an assembler label,
  // use the mangled name. This differs from the plain label on platforms
  // that prefix labels.
  if (FD->hasAttr<AsmLabelAttr>())
    Name = getMangledName(D);
  else
    Name = Context.BuiltinInfo.getName(BuiltinID) + 10;

  llvm::FunctionType *Ty =
      cast<llvm::FunctionType>(getTypes().ConvertType(FD->getType()));

  return GetOrCreateLLVMFunction(Name, Ty, D, /*ForVTable=*/false);
}

/// Emit the conversions required to turn the given value into an
/// integer of the given size.
static Value *EmitToInt(CodeGenFunction &CGF, llvm::Value *V,
                        QualType T, llvm::IntegerType *IntType) {
  V = CGF.EmitToMemory(V, T);

  if (V->getType()->isPointerTy())
    return CGF.Builder.CreatePtrToInt(V, IntType);

  assert(V->getType() == IntType);
  return V;
}

static Value *EmitFromInt(CodeGenFunction &CGF, llvm::Value *V,
                          QualType T, llvm::Type *ResultType) {
  V = CGF.EmitFromMemory(V, T);

  if (ResultType->isPointerTy())
    return CGF.Builder.CreateIntToPtr(V, ResultType);

  assert(V->getType() == ResultType);
  return V;
}

/// Utility to insert an atomic instruction based on Intrinsic::ID
/// and the expression node.
static Value *MakeBinaryAtomicValue(
    CodeGenFunction &CGF, llvm::AtomicRMWInst::BinOp Kind, const CallExpr *E,
    AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
  QualType T = E->getType();
  assert(E->getArg(0)->getType()->isPointerType());
  assert(CGF.getContext().hasSameUnqualifiedType(T,
                                  E->getArg(0)->getType()->getPointeeType()));
  assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));

  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();

  llvm::IntegerType *IntType =
      llvm::IntegerType::get(CGF.getLLVMContext(),
                             CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[2];
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);

  llvm::Value *Result = CGF.Builder.CreateAtomicRMW(
      Kind, Args[0], Args[1], Ordering);
  return EmitFromInt(CGF, Result, T, ValueType);
}

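// Illustrative sketch (assumes a plain 32-bit operand and the default seq_cst
// ordering): a call such as __sync_fetch_and_add(p, v) routed through
// MakeBinaryAtomicValue is expected to lower to roughly
//   %old = atomicrmw add i32* %p, i32 %v seq_cst
// with %old (the value previously stored at %p) converted back to the source
// type and returned.
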
static Value *EmitNontemporalStore(CodeGenFunction &CGF, const CallExpr *E) {
  Value *Val = CGF.EmitScalarExpr(E->getArg(0));
  Value *Address = CGF.EmitScalarExpr(E->getArg(1));

  // Convert the type of the pointer to a pointer to the stored type.
  Val = CGF.EmitToMemory(Val, E->getArg(0)->getType());
  Value *BC = CGF.Builder.CreateBitCast(
      Address, llvm::PointerType::getUnqual(Val->getType()), "cast");
  LValue LV = CGF.MakeNaturalAlignAddrLValue(BC, E->getArg(0)->getType());
  LV.setNontemporal(true);
  CGF.EmitStoreOfScalar(Val, LV, false);
  return nullptr;
}

static Value *EmitNontemporalLoad(CodeGenFunction &CGF, const CallExpr *E) {
  Value *Address = CGF.EmitScalarExpr(E->getArg(0));

  LValue LV = CGF.MakeNaturalAlignAddrLValue(Address, E->getType());
  LV.setNontemporal(true);
  return CGF.EmitLoadOfScalar(LV, E->getExprLoc());
}

static RValue EmitBinaryAtomic(CodeGenFunction &CGF,
                               llvm::AtomicRMWInst::BinOp Kind,
                               const CallExpr *E) {
  return RValue::get(MakeBinaryAtomicValue(CGF, Kind, E));
}

/// Utility to insert an atomic instruction based on Intrinsic::ID and
/// the expression node, where the return value is the result of the
/// operation.
static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF,
                                   llvm::AtomicRMWInst::BinOp Kind,
                                   const CallExpr *E,
                                   Instruction::BinaryOps Op,
                                   bool Invert = false) {
  QualType T = E->getType();
  assert(E->getArg(0)->getType()->isPointerType());
  assert(CGF.getContext().hasSameUnqualifiedType(T,
                                  E->getArg(0)->getType()->getPointeeType()));
  assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));

  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();

  llvm::IntegerType *IntType =
      llvm::IntegerType::get(CGF.getLLVMContext(),
                             CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  llvm::Value *Args[2];
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);

  llvm::Value *Result = CGF.Builder.CreateAtomicRMW(
      Kind, Args[0], Args[1], llvm::AtomicOrdering::SequentiallyConsistent);
  Result = CGF.Builder.CreateBinOp(Op, Result, Args[1]);
  if (Invert)
    Result = CGF.Builder.CreateBinOp(llvm::Instruction::Xor, Result,
                                     llvm::ConstantInt::get(IntType, -1));
  Result = EmitFromInt(CGF, Result, T, ValueType);
  return RValue::get(Result);
}

/// Utility to insert an atomic cmpxchg instruction.
///
/// @param CGF The current codegen function.
/// @param E   Builtin call expression to convert to cmpxchg.
///            arg0 - address to operate on
///            arg1 - value to compare with
///            arg2 - new value
/// @param ReturnBool Specifies whether to return success flag of
///            cmpxchg result or the old value.
///
/// @returns result of cmpxchg, according to ReturnBool
///
/// Note: In order to lower Microsoft's _InterlockedCompareExchange* intrinsics,
/// invoke the function EmitAtomicCmpXchgForMSIntrin.
static Value *MakeAtomicCmpXchgValue(CodeGenFunction &CGF, const CallExpr *E,
                                     bool ReturnBool) {
  QualType T = ReturnBool ? E->getArg(1)->getType() : E->getType();
  llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();

  llvm::IntegerType *IntType = llvm::IntegerType::get(
      CGF.getLLVMContext(), CGF.getContext().getTypeSize(T));
  llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);

  Value *Args[3];
  Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Type *ValueType = Args[1]->getType();
  Args[1] = EmitToInt(CGF, Args[1], T, IntType);
  Args[2] = EmitToInt(CGF, CGF.EmitScalarExpr(E->getArg(2)), T, IntType);

  Value *Pair = CGF.Builder.CreateAtomicCmpXchg(
      Args[0], Args[1], Args[2], llvm::AtomicOrdering::SequentiallyConsistent,
      llvm::AtomicOrdering::SequentiallyConsistent);
  if (ReturnBool)
    // Extract boolean success flag and zext it to int.
    return CGF.Builder.CreateZExt(CGF.Builder.CreateExtractValue(Pair, 1),
                                  CGF.ConvertType(E->getType()));
  else
    // Extract old value and emit it using the same type as compare value.
    return EmitFromInt(CGF, CGF.Builder.CreateExtractValue(Pair, 0), T,
                       ValueType);
}

/// This function should be invoked to emit atomic cmpxchg for Microsoft's
/// _InterlockedCompareExchange* intrinsics which have the following signature:
/// T _InterlockedCompareExchange(T volatile *Destination,
///                               T Exchange,
///                               T Comparand);
///
/// Whereas the llvm 'cmpxchg' instruction has the following syntax:
/// cmpxchg *Destination, Comparand, Exchange.
/// So we need to swap Comparand and Exchange when invoking
/// CreateAtomicCmpXchg. That is the reason we could not use the above utility
/// function MakeAtomicCmpXchgValue since it expects the arguments to be
/// already swapped.

static
Value *EmitAtomicCmpXchgForMSIntrin(CodeGenFunction &CGF, const CallExpr *E,
    AtomicOrdering SuccessOrdering = AtomicOrdering::SequentiallyConsistent) {
  assert(E->getArg(0)->getType()->isPointerType());
  assert(CGF.getContext().hasSameUnqualifiedType(
      E->getType(), E->getArg(0)->getType()->getPointeeType()));
  assert(CGF.getContext().hasSameUnqualifiedType(E->getType(),
                                                 E->getArg(1)->getType()));
  assert(CGF.getContext().hasSameUnqualifiedType(E->getType(),
                                                 E->getArg(2)->getType()));

  auto *Destination = CGF.EmitScalarExpr(E->getArg(0));
  auto *Comparand = CGF.EmitScalarExpr(E->getArg(2));
  auto *Exchange = CGF.EmitScalarExpr(E->getArg(1));

  // For Release ordering, the failure ordering should be Monotonic.
  auto FailureOrdering = SuccessOrdering == AtomicOrdering::Release ?
                         AtomicOrdering::Monotonic :
                         SuccessOrdering;

  auto *Result = CGF.Builder.CreateAtomicCmpXchg(
                   Destination, Comparand, Exchange,
                   SuccessOrdering, FailureOrdering);
  Result->setVolatile(true);
  return CGF.Builder.CreateExtractValue(Result, 0);
}

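// Illustrative sketch of the operand swap above (assumes a 32-bit T): a call
//   _InterlockedCompareExchange(Dest, Exch, Comp)
// is expected to lower to roughly
//   %pair = cmpxchg volatile i32* %Dest, i32 %Comp, i32 %Exch seq_cst seq_cst
//   %old  = extractvalue { i32, i1 } %pair, 0
// with %old (the previously stored value) returned to the caller.
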
static Value *EmitAtomicIncrementValue(CodeGenFunction &CGF, const CallExpr *E,
    AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
  assert(E->getArg(0)->getType()->isPointerType());

  auto *IntTy = CGF.ConvertType(E->getType());
  auto *Result = CGF.Builder.CreateAtomicRMW(
                   AtomicRMWInst::Add,
                   CGF.EmitScalarExpr(E->getArg(0)),
                   ConstantInt::get(IntTy, 1),
                   Ordering);
  return CGF.Builder.CreateAdd(Result, ConstantInt::get(IntTy, 1));
}

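// Note on the increment/decrement helpers: 'atomicrmw add/sub' yields the
// value previously stored, while MSVC documents _InterlockedIncrement and
// _InterlockedDecrement as returning the resulting (new) value, hence the
// extra CreateAdd/CreateSub of 1 applied to the atomicrmw result.
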
static Value *EmitAtomicDecrementValue(CodeGenFunction &CGF, const CallExpr *E,
    AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
  assert(E->getArg(0)->getType()->isPointerType());

  auto *IntTy = CGF.ConvertType(E->getType());
  auto *Result = CGF.Builder.CreateAtomicRMW(
                   AtomicRMWInst::Sub,
                   CGF.EmitScalarExpr(E->getArg(0)),
                   ConstantInt::get(IntTy, 1),
                   Ordering);
  return CGF.Builder.CreateSub(Result, ConstantInt::get(IntTy, 1));
}

// Build a plain volatile load.
static Value *EmitISOVolatileLoad(CodeGenFunction &CGF, const CallExpr *E) {
  Value *Ptr = CGF.EmitScalarExpr(E->getArg(0));
  QualType ElTy = E->getArg(0)->getType()->getPointeeType();
  CharUnits LoadSize = CGF.getContext().getTypeSizeInChars(ElTy);
  llvm::Type *ITy =
      llvm::IntegerType::get(CGF.getLLVMContext(), LoadSize.getQuantity() * 8);
  Ptr = CGF.Builder.CreateBitCast(Ptr, ITy->getPointerTo());
  llvm::LoadInst *Load = CGF.Builder.CreateAlignedLoad(Ptr, LoadSize);
  Load->setVolatile(true);
  return Load;
}

// Build a plain volatile store.
static Value *EmitISOVolatileStore(CodeGenFunction &CGF, const CallExpr *E) {
  Value *Ptr = CGF.EmitScalarExpr(E->getArg(0));
  Value *Value = CGF.EmitScalarExpr(E->getArg(1));
  QualType ElTy = E->getArg(0)->getType()->getPointeeType();
  CharUnits StoreSize = CGF.getContext().getTypeSizeInChars(ElTy);
  llvm::Type *ITy =
      llvm::IntegerType::get(CGF.getLLVMContext(), StoreSize.getQuantity() * 8);
  Ptr = CGF.Builder.CreateBitCast(Ptr, ITy->getPointerTo());
  llvm::StoreInst *Store =
      CGF.Builder.CreateAlignedStore(Value, Ptr, StoreSize);
  Store->setVolatile(true);
  return Store;
}

// Emit a simple mangled intrinsic that has 1 argument and a return type
// matching the argument type.
static Value *emitUnaryBuiltin(CodeGenFunction &CGF,
                               const CallExpr *E,
                               unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));

  Function *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, Src0);
}

// Emit an intrinsic that has 2 operands of the same type as its result.
static Value *emitBinaryBuiltin(CodeGenFunction &CGF,
                                const CallExpr *E,
                                unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));
  llvm::Value *Src1 = CGF.EmitScalarExpr(E->getArg(1));

  Function *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, { Src0, Src1 });
}

// Emit an intrinsic that has 3 operands of the same type as its result.
static Value *emitTernaryBuiltin(CodeGenFunction &CGF,
                                 const CallExpr *E,
                                 unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));
  llvm::Value *Src1 = CGF.EmitScalarExpr(E->getArg(1));
  llvm::Value *Src2 = CGF.EmitScalarExpr(E->getArg(2));

  Function *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, { Src0, Src1, Src2 });
}

// Emit an intrinsic that has 1 float or double operand, and 1 integer.
static Value *emitFPIntBuiltin(CodeGenFunction &CGF,
                               const CallExpr *E,
                               unsigned IntrinsicID) {
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));
  llvm::Value *Src1 = CGF.EmitScalarExpr(E->getArg(1));

  Function *F = CGF.CGM.getIntrinsic(IntrinsicID, Src0->getType());
  return CGF.Builder.CreateCall(F, {Src0, Src1});
}

// Emit an intrinsic that has overloaded integer result and fp operand.
static Value *emitFPToIntRoundBuiltin(CodeGenFunction &CGF,
                                      const CallExpr *E,
                                      unsigned IntrinsicID) {
  llvm::Type *ResultType = CGF.ConvertType(E->getType());
  llvm::Value *Src0 = CGF.EmitScalarExpr(E->getArg(0));

  Function *F = CGF.CGM.getIntrinsic(IntrinsicID,
                                     {ResultType, Src0->getType()});
  return CGF.Builder.CreateCall(F, Src0);
}

/// EmitFAbs - Emit a call to @llvm.fabs().
static Value *EmitFAbs(CodeGenFunction &CGF, Value *V) {
  Function *F = CGF.CGM.getIntrinsic(Intrinsic::fabs, V->getType());
  llvm::CallInst *Call = CGF.Builder.CreateCall(F, V);
  Call->setDoesNotAccessMemory();
  return Call;
}

/// Emit the computation of the sign bit for a floating point value. Returns
/// the i1 sign bit value.
static Value *EmitSignBit(CodeGenFunction &CGF, Value *V) {
  LLVMContext &C = CGF.CGM.getLLVMContext();

  llvm::Type *Ty = V->getType();
  int Width = Ty->getPrimitiveSizeInBits();
  llvm::Type *IntTy = llvm::IntegerType::get(C, Width);
  V = CGF.Builder.CreateBitCast(V, IntTy);
  if (Ty->isPPC_FP128Ty()) {
    // We want the sign bit of the higher-order double. The bitcast we just
    // did works as if the double-double was stored to memory and then
    // read as an i128. The "store" will put the higher-order double in the
    // lower address in both little- and big-Endian modes, but the "load"
    // will treat those bits as a different part of the i128: the low bits in
    // little-Endian, the high bits in big-Endian. Therefore, on big-Endian
    // we need to shift the high bits down to the low before truncating.
    Width >>= 1;
    if (CGF.getTarget().isBigEndian()) {
      Value *ShiftCst = llvm::ConstantInt::get(IntTy, Width);
      V = CGF.Builder.CreateLShr(V, ShiftCst);
    }
    // We are truncating the value in order to extract the higher-order
    // double, which we will be using to extract the sign from.
    IntTy = llvm::IntegerType::get(C, Width);
    V = CGF.Builder.CreateTrunc(V, IntTy);
  }
  Value *Zero = llvm::Constant::getNullValue(IntTy);
  return CGF.Builder.CreateICmpSLT(V, Zero);
}

static RValue emitLibraryCall(CodeGenFunction &CGF, const FunctionDecl *FD,
                              const CallExpr *E, llvm::Constant *calleeValue) {
  CGCallee callee = CGCallee::forDirect(calleeValue, GlobalDecl(FD));
  return CGF.EmitCall(E->getCallee()->getType(), callee, E, ReturnValueSlot());
}

/// Emit a call to llvm.{sadd,uadd,ssub,usub,smul,umul}.with.overflow.*
/// depending on IntrinsicID.
///
/// \arg CGF The current codegen function.
/// \arg IntrinsicID The ID for the Intrinsic we wish to generate.
/// \arg X The first argument to the llvm.*.with.overflow.*.
/// \arg Y The second argument to the llvm.*.with.overflow.*.
/// \arg Carry The carry returned by the llvm.*.with.overflow.*.
/// \returns The result (i.e. sum/product) returned by the intrinsic.
static llvm::Value *EmitOverflowIntrinsic(CodeGenFunction &CGF,
                                          const llvm::Intrinsic::ID IntrinsicID,
                                          llvm::Value *X, llvm::Value *Y,
                                          llvm::Value *&Carry) {
  // Make sure we have integers of the same width.
  assert(X->getType() == Y->getType() &&
         "Arguments must be the same type. (Did you forget to make sure both "
         "arguments have the same integer width?)");

  Function *Callee = CGF.CGM.getIntrinsic(IntrinsicID, X->getType());
  llvm::Value *Tmp = CGF.Builder.CreateCall(Callee, {X, Y});
  Carry = CGF.Builder.CreateExtractValue(Tmp, 1);
  return CGF.Builder.CreateExtractValue(Tmp, 0);
}

static Value *emitRangedBuiltin(CodeGenFunction &CGF,
                                unsigned IntrinsicID,
                                int low, int high) {
  llvm::MDBuilder MDHelper(CGF.getLLVMContext());
  llvm::MDNode *RNode = MDHelper.createRange(APInt(32, low), APInt(32, high));
  Function *F = CGF.CGM.getIntrinsic(IntrinsicID, {});
  llvm::Instruction *Call = CGF.Builder.CreateCall(F);
  Call->setMetadata(llvm::LLVMContext::MD_range, RNode);
  return Call;
}

namespace {
  struct WidthAndSignedness {
    unsigned Width;
    bool Signed;
  };
}

static WidthAndSignedness
getIntegerWidthAndSignedness(const clang::ASTContext &context,
                             const clang::QualType Type) {
  assert(Type->isIntegerType() && "Given type is not an integer.");
  unsigned Width = Type->isBooleanType() ? 1 : context.getTypeInfo(Type).Width;
  bool Signed = Type->isSignedIntegerType();
  return {Width, Signed};
}

// Given one or more integer types, this function produces an integer type that
// encompasses them: any value in one of the given types could be expressed in
// the encompassing type.
static struct WidthAndSignedness
EncompassingIntegerType(ArrayRef<struct WidthAndSignedness> Types) {
  assert(Types.size() > 0 && "Empty list of types.");

  // If any of the given types is signed, we must return a signed type.
  bool Signed = false;
  for (const auto &Type : Types) {
    Signed |= Type.Signed;
  }

  // The encompassing type must have a width greater than or equal to the width
  // of the specified types. Additionally, if the encompassing type is signed,
  // its width must be strictly greater than the width of any unsigned types
  // given.
  unsigned Width = 0;
  for (const auto &Type : Types) {
    unsigned MinWidth = Type.Width + (Signed && !Type.Signed);
    if (Width < MinWidth) {
      Width = MinWidth;
    }
  }

  return {Width, Signed};
}

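// Illustrative example: for {32-bit signed, 32-bit unsigned} the result is
// {33, signed}, since a signed encompassing type needs one extra bit to
// represent every value of the unsigned 32-bit type.
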
Value *CodeGenFunction::EmitVAStartEnd(Value *ArgValue, bool IsStart) {
  llvm::Type *DestType = Int8PtrTy;
  if (ArgValue->getType() != DestType)
    ArgValue =
        Builder.CreateBitCast(ArgValue, DestType, ArgValue->getName().data());

  Intrinsic::ID inst = IsStart ? Intrinsic::vastart : Intrinsic::vaend;
  return Builder.CreateCall(CGM.getIntrinsic(inst), ArgValue);
}

/// Checks if using the result of __builtin_object_size(p, @p From) in place of
/// __builtin_object_size(p, @p To) is correct.
static bool areBOSTypesCompatible(int From, int To) {
  // Note: Our __builtin_object_size implementation currently treats Type=0 and
  // Type=2 identically. Encoding this implementation detail here may make
  // improving __builtin_object_size difficult in the future, so it's omitted.
  return From == To || (From == 0 && To == 1) || (From == 3 && To == 2);
}

static llvm::Value *
getDefaultBuiltinObjectSizeResult(unsigned Type, llvm::IntegerType *ResType) {
  return ConstantInt::get(ResType, (Type & 2) ? 0 : -1, /*isSigned=*/true);
}

llvm::Value *
CodeGenFunction::evaluateOrEmitBuiltinObjectSize(const Expr *E, unsigned Type,
                                                 llvm::IntegerType *ResType,
                                                 llvm::Value *EmittedE,
                                                 bool IsDynamic) {
  uint64_t ObjectSize;
  if (!E->tryEvaluateObjectSize(ObjectSize, getContext(), Type))
    return emitBuiltinObjectSize(E, Type, ResType, EmittedE, IsDynamic);
  return ConstantInt::get(ResType, ObjectSize, /*isSigned=*/true);
}

/// Returns a Value corresponding to the size of the given expression.
/// This Value may be either of the following:
///   - A llvm::Argument (if E is a param with the pass_object_size attribute on
///     it)
///   - A call to the @llvm.objectsize intrinsic
///
/// EmittedE is the result of emitting `E` as a scalar expr. If it's non-null
/// and we wouldn't otherwise try to reference a pass_object_size parameter,
/// we'll call @llvm.objectsize on EmittedE, rather than emitting E.
llvm::Value *
CodeGenFunction::emitBuiltinObjectSize(const Expr *E, unsigned Type,
                                       llvm::IntegerType *ResType,
                                       llvm::Value *EmittedE, bool IsDynamic) {
  // We need to reference an argument if the pointer is a parameter with the
  // pass_object_size attribute.
  if (auto *D = dyn_cast<DeclRefExpr>(E->IgnoreParenImpCasts())) {
    auto *Param = dyn_cast<ParmVarDecl>(D->getDecl());
    auto *PS = D->getDecl()->getAttr<PassObjectSizeAttr>();
    if (Param != nullptr && PS != nullptr &&
        areBOSTypesCompatible(PS->getType(), Type)) {
      auto Iter = SizeArguments.find(Param);
      assert(Iter != SizeArguments.end());

      const ImplicitParamDecl *D = Iter->second;
      auto DIter = LocalDeclMap.find(D);
      assert(DIter != LocalDeclMap.end());

      return EmitLoadOfScalar(DIter->second, /*volatile=*/false,
                              getContext().getSizeType(), E->getBeginLoc());
    }
  }

  // LLVM can't handle Type=3 appropriately, and __builtin_object_size shouldn't
  // evaluate E for side-effects. In either case, we shouldn't lower to
  // @llvm.objectsize.
  if (Type == 3 || (!EmittedE && E->HasSideEffects(getContext())))
    return getDefaultBuiltinObjectSizeResult(Type, ResType);

  Value *Ptr = EmittedE ? EmittedE : EmitScalarExpr(E);
  assert(Ptr->getType()->isPointerTy() &&
         "Non-pointer passed to __builtin_object_size?");

  Function *F =
      CGM.getIntrinsic(Intrinsic::objectsize, {ResType, Ptr->getType()});

  // LLVM only supports 0 and 2, make sure that we pass along that as a boolean.
  Value *Min = Builder.getInt1((Type & 2) != 0);
  // For GCC compatibility, __builtin_object_size treats NULL as unknown size.
  Value *NullIsUnknown = Builder.getTrue();
  Value *Dynamic = Builder.getInt1(IsDynamic);
  return Builder.CreateCall(F, {Ptr, Min, NullIsUnknown, Dynamic});
}

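// Illustrative sketch (assumes an i64 result type and an i8* pointer): for
// __builtin_object_size(p, 1), emitBuiltinObjectSize above is expected to
// emit roughly
//   call i64 @llvm.objectsize.i64.p0i8(i8* %p, i1 false, i1 true, i1 false)
// i.e. Min is false for types 0/1, NullIsUnknown is true, and Dynamic is
// false for the non-__builtin_dynamic_object_size form.
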
namespace {
/// A struct to generically describe a bit test intrinsic.
struct BitTest {
  enum ActionKind : uint8_t { TestOnly, Complement, Reset, Set };
  enum InterlockingKind : uint8_t {
    Unlocked,
    Sequential,
    Acquire,
    Release,
    NoFence
  };

  ActionKind Action;
  InterlockingKind Interlocking;
  bool Is64Bit;

  static BitTest decodeBitTestBuiltin(unsigned BuiltinID);
};
} // namespace

BitTest BitTest::decodeBitTestBuiltin(unsigned BuiltinID) {
  switch (BuiltinID) {
    // Main portable variants.
  case Builtin::BI_bittest:
    return {TestOnly, Unlocked, false};
  case Builtin::BI_bittestandcomplement:
    return {Complement, Unlocked, false};
  case Builtin::BI_bittestandreset:
    return {Reset, Unlocked, false};
  case Builtin::BI_bittestandset:
    return {Set, Unlocked, false};
  case Builtin::BI_interlockedbittestandreset:
    return {Reset, Sequential, false};
  case Builtin::BI_interlockedbittestandset:
    return {Set, Sequential, false};

    // X86-specific 64-bit variants.
  case Builtin::BI_bittest64:
    return {TestOnly, Unlocked, true};
  case Builtin::BI_bittestandcomplement64:
    return {Complement, Unlocked, true};
  case Builtin::BI_bittestandreset64:
    return {Reset, Unlocked, true};
  case Builtin::BI_bittestandset64:
    return {Set, Unlocked, true};
  case Builtin::BI_interlockedbittestandreset64:
    return {Reset, Sequential, true};
  case Builtin::BI_interlockedbittestandset64:
    return {Set, Sequential, true};

    // ARM/AArch64-specific ordering variants.
  case Builtin::BI_interlockedbittestandset_acq:
    return {Set, Acquire, false};
  case Builtin::BI_interlockedbittestandset_rel:
    return {Set, Release, false};
  case Builtin::BI_interlockedbittestandset_nf:
    return {Set, NoFence, false};
  case Builtin::BI_interlockedbittestandreset_acq:
    return {Reset, Acquire, false};
  case Builtin::BI_interlockedbittestandreset_rel:
    return {Reset, Release, false};
  case Builtin::BI_interlockedbittestandreset_nf:
    return {Reset, NoFence, false};
  }
  llvm_unreachable("expected only bittest intrinsics");
}

static char bitActionToX86BTCode(BitTest::ActionKind A) {
  switch (A) {
  case BitTest::TestOnly: return '\0';
  case BitTest::Complement: return 'c';
  case BitTest::Reset: return 'r';
  case BitTest::Set: return 's';
  }
  llvm_unreachable("invalid action");
}

static llvm::Value *EmitX86BitTestIntrinsic(CodeGenFunction &CGF,
                                            BitTest BT,
                                            const CallExpr *E, Value *BitBase,
                                            Value *BitPos) {
  char Action = bitActionToX86BTCode(BT.Action);
  char SizeSuffix = BT.Is64Bit ? 'q' : 'l';

  // Build the assembly.
  SmallString<64> Asm;
  raw_svector_ostream AsmOS(Asm);
  if (BT.Interlocking != BitTest::Unlocked)
    AsmOS << "lock ";
  AsmOS << "bt";
  if (Action)
    AsmOS << Action;
  AsmOS << SizeSuffix << " $2, ($1)\n\tsetc ${0:b}";

  // Build the constraints. FIXME: We should support immediates when possible.
  std::string Constraints = "=r,r,r,~{cc},~{flags},~{fpsr}";
  llvm::IntegerType *IntType = llvm::IntegerType::get(
      CGF.getLLVMContext(),
      CGF.getContext().getTypeSize(E->getArg(1)->getType()));
  llvm::Type *IntPtrType = IntType->getPointerTo();
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGF.Int8Ty, {IntPtrType, IntType}, false);

  llvm::InlineAsm *IA =
      llvm::InlineAsm::get(FTy, Asm, Constraints, /*SideEffects=*/true);
  return CGF.Builder.CreateCall(IA, {BitBase, BitPos});
}

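// Illustrative example: for _interlockedbittestandset (Set, Sequential,
// 32-bit), EmitX86BitTestIntrinsic above builds the inline-asm string
//   "lock btsl $2, ($1)\n\tsetc ${0:b}"
// with constraints "=r,r,r,~{cc},~{flags},~{fpsr}", returning the carry flag
// as an i8.
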
static llvm::AtomicOrdering
getBitTestAtomicOrdering(BitTest::InterlockingKind I) {
  switch (I) {
  case BitTest::Unlocked: return llvm::AtomicOrdering::NotAtomic;
  case BitTest::Sequential: return llvm::AtomicOrdering::SequentiallyConsistent;
  case BitTest::Acquire: return llvm::AtomicOrdering::Acquire;
  case BitTest::Release: return llvm::AtomicOrdering::Release;
  case BitTest::NoFence: return llvm::AtomicOrdering::Monotonic;
  }
  llvm_unreachable("invalid interlocking");
}

/// Emit a _bittest* intrinsic. These intrinsics take a pointer to an array of
/// bits and a bit position and read and optionally modify the bit at that
/// position. The position index can be arbitrarily large, i.e. it can be larger
/// than 31 or 63, so we need an indexed load in the general case.
static llvm::Value *EmitBitTestIntrinsic(CodeGenFunction &CGF,
                                         unsigned BuiltinID,
                                         const CallExpr *E) {
  Value *BitBase = CGF.EmitScalarExpr(E->getArg(0));
  Value *BitPos = CGF.EmitScalarExpr(E->getArg(1));

  BitTest BT = BitTest::decodeBitTestBuiltin(BuiltinID);

  // X86 has special BT, BTC, BTR, and BTS instructions that handle the array
  // indexing operation internally. Use them if possible.
  llvm::Triple::ArchType Arch = CGF.getTarget().getTriple().getArch();
  if (Arch == llvm::Triple::x86 || Arch == llvm::Triple::x86_64)
    return EmitX86BitTestIntrinsic(CGF, BT, E, BitBase, BitPos);

  // Otherwise, use generic code to load one byte and test the bit. Use all but
  // the bottom three bits as the array index, and the bottom three bits to form
  // a mask.
  // Bit = BitBaseI8[BitPos >> 3] & (1 << (BitPos & 0x7)) != 0;
  Value *ByteIndex = CGF.Builder.CreateAShr(
      BitPos, llvm::ConstantInt::get(BitPos->getType(), 3), "bittest.byteidx");
  Value *BitBaseI8 = CGF.Builder.CreatePointerCast(BitBase, CGF.Int8PtrTy);
  Address ByteAddr(CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, BitBaseI8,
                                                 ByteIndex, "bittest.byteaddr"),
                   CharUnits::One());
  Value *PosLow =
      CGF.Builder.CreateAnd(CGF.Builder.CreateTrunc(BitPos, CGF.Int8Ty),
                            llvm::ConstantInt::get(CGF.Int8Ty, 0x7));

  // The updating instructions will need a mask.
  Value *Mask = nullptr;
  if (BT.Action != BitTest::TestOnly) {
    Mask = CGF.Builder.CreateShl(llvm::ConstantInt::get(CGF.Int8Ty, 1), PosLow,
                                 "bittest.mask");
  }

  // Check the action and ordering of the interlocked intrinsics.
  llvm::AtomicOrdering Ordering = getBitTestAtomicOrdering(BT.Interlocking);

  Value *OldByte = nullptr;
  if (Ordering != llvm::AtomicOrdering::NotAtomic) {
    // Emit a combined atomicrmw load/store operation for the interlocked
    // intrinsics.
    llvm::AtomicRMWInst::BinOp RMWOp = llvm::AtomicRMWInst::Or;
    if (BT.Action == BitTest::Reset) {
      Mask = CGF.Builder.CreateNot(Mask);
      RMWOp = llvm::AtomicRMWInst::And;
    }
    OldByte = CGF.Builder.CreateAtomicRMW(RMWOp, ByteAddr.getPointer(), Mask,
                                          Ordering);
  } else {
    // Emit a plain load for the non-interlocked intrinsics.
    OldByte = CGF.Builder.CreateLoad(ByteAddr, "bittest.byte");
    Value *NewByte = nullptr;
    switch (BT.Action) {
    case BitTest::TestOnly:
      // Don't store anything.
      break;
    case BitTest::Complement:
      NewByte = CGF.Builder.CreateXor(OldByte, Mask);
      break;
    case BitTest::Reset:
      NewByte = CGF.Builder.CreateAnd(OldByte, CGF.Builder.CreateNot(Mask));
      break;
    case BitTest::Set:
      NewByte = CGF.Builder.CreateOr(OldByte, Mask);
      break;
    }
    if (NewByte)
      CGF.Builder.CreateStore(NewByte, ByteAddr);
  }

  // However we loaded the old byte, either by plain load or atomicrmw, shift
  // the bit into the low position and mask it to 0 or 1.
  Value *ShiftedByte = CGF.Builder.CreateLShr(OldByte, PosLow, "bittest.shr");
  return CGF.Builder.CreateAnd(
      ShiftedByte, llvm::ConstantInt::get(CGF.Int8Ty, 1), "bittest.res");
}

namespace {
enum class MSVCSetJmpKind {
  _setjmpex,
  _setjmp3,
  _setjmp
};
}

/// MSVC handles setjmp a bit differently on different platforms. On every
/// architecture except 32-bit x86, the frame address is passed. On x86, extra
/// parameters can be passed as variadic arguments, but we always pass none.
static RValue EmitMSVCRTSetJmp(CodeGenFunction &CGF, MSVCSetJmpKind SJKind,
                               const CallExpr *E) {
  llvm::Value *Arg1 = nullptr;
  llvm::Type *Arg1Ty = nullptr;
  StringRef Name;
  bool IsVarArg = false;
  if (SJKind == MSVCSetJmpKind::_setjmp3) {
    Name = "_setjmp3";
    Arg1Ty = CGF.Int32Ty;
    Arg1 = llvm::ConstantInt::get(CGF.IntTy, 0);
    IsVarArg = true;
  } else {
    Name = SJKind == MSVCSetJmpKind::_setjmp ? "_setjmp" : "_setjmpex";
    Arg1Ty = CGF.Int8PtrTy;
    if (CGF.getTarget().getTriple().getArch() == llvm::Triple::aarch64) {
      Arg1 = CGF.Builder.CreateCall(CGF.CGM.getIntrinsic(Intrinsic::sponentry));
    } else
      Arg1 = CGF.Builder.CreateCall(CGF.CGM.getIntrinsic(Intrinsic::frameaddress),
                                    llvm::ConstantInt::get(CGF.Int32Ty, 0));
  }

  // Mark the call site and declaration with ReturnsTwice.
  llvm::Type *ArgTypes[2] = {CGF.Int8PtrTy, Arg1Ty};
  llvm::AttributeList ReturnsTwiceAttr = llvm::AttributeList::get(
      CGF.getLLVMContext(), llvm::AttributeList::FunctionIndex,
      llvm::Attribute::ReturnsTwice);
  llvm::FunctionCallee SetJmpFn = CGF.CGM.CreateRuntimeFunction(
      llvm::FunctionType::get(CGF.IntTy, ArgTypes, IsVarArg), Name,
      ReturnsTwiceAttr, /*Local=*/true);

  llvm::Value *Buf = CGF.Builder.CreateBitOrPointerCast(
      CGF.EmitScalarExpr(E->getArg(0)), CGF.Int8PtrTy);
  llvm::Value *Args[] = {Buf, Arg1};
  llvm::CallBase *CB = CGF.EmitRuntimeCallOrInvoke(SetJmpFn, Args);
  CB->setAttributes(ReturnsTwiceAttr);
  return RValue::get(CB);
}

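// Summary of EmitMSVCRTSetJmp above: for MSVCSetJmpKind::_setjmp3 it calls the
// variadic _setjmp3(buf, 0); for _setjmp/_setjmpex it passes the frame address
// (or, on AArch64, @llvm.sponentry) as the second argument, and marks both the
// runtime declaration and the call site with the returns_twice attribute.
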
Mandeep Singh Grang0054f482018-07-17 22:03:24 +0000869// Many of MSVC builtins are on x64, ARM and AArch64; to avoid repeating code,
870// we handle them here.
Albert Gutowski5e08df02016-10-13 22:35:07 +0000871enum class CodeGenFunction::MSVCIntrin {
872 _BitScanForward,
873 _BitScanReverse,
874 _InterlockedAnd,
875 _InterlockedDecrement,
876 _InterlockedExchange,
877 _InterlockedExchangeAdd,
878 _InterlockedExchangeSub,
879 _InterlockedIncrement,
880 _InterlockedOr,
881 _InterlockedXor,
Eli Friedmanb262d162018-10-31 21:31:09 +0000882 _InterlockedExchangeAdd_acq,
883 _InterlockedExchangeAdd_rel,
884 _InterlockedExchangeAdd_nf,
Mandeep Singh Grang7fa07e52018-11-02 21:18:23 +0000885 _InterlockedExchange_acq,
886 _InterlockedExchange_rel,
887 _InterlockedExchange_nf,
Mandeep Singh Grang6b880682018-11-06 00:36:48 +0000888 _InterlockedCompareExchange_acq,
889 _InterlockedCompareExchange_rel,
890 _InterlockedCompareExchange_nf,
Mandeep Singh Grangec62b312018-11-06 01:11:25 +0000891 _InterlockedOr_acq,
892 _InterlockedOr_rel,
893 _InterlockedOr_nf,
Mandeep Singh Grang806f1072018-11-06 04:55:20 +0000894 _InterlockedXor_acq,
895 _InterlockedXor_rel,
896 _InterlockedXor_nf,
Mandeep Singh Grangc89157b2018-11-06 05:03:13 +0000897 _InterlockedAnd_acq,
898 _InterlockedAnd_rel,
899 _InterlockedAnd_nf,
Mandeep Singh Grangfdf74d92018-11-06 05:05:32 +0000900 _InterlockedIncrement_acq,
901 _InterlockedIncrement_rel,
902 _InterlockedIncrement_nf,
Mandeep Singh Grang574cadd2018-11-06 05:07:43 +0000903 _InterlockedDecrement_acq,
904 _InterlockedDecrement_rel,
905 _InterlockedDecrement_nf,
Reid Kleckner04f9f912017-02-09 18:31:06 +0000906 __fastfail,
Albert Gutowski5e08df02016-10-13 22:35:07 +0000907};
908
909Value *CodeGenFunction::EmitMSVCBuiltinExpr(MSVCIntrin BuiltinID,
Reid Kleckner04f9f912017-02-09 18:31:06 +0000910 const CallExpr *E) {
Albert Gutowski5e08df02016-10-13 22:35:07 +0000911 switch (BuiltinID) {
912 case MSVCIntrin::_BitScanForward:
913 case MSVCIntrin::_BitScanReverse: {
914 Value *ArgValue = EmitScalarExpr(E->getArg(1));
915
916 llvm::Type *ArgType = ArgValue->getType();
917 llvm::Type *IndexType =
918 EmitScalarExpr(E->getArg(0))->getType()->getPointerElementType();
919 llvm::Type *ResultType = ConvertType(E->getType());
920
921 Value *ArgZero = llvm::Constant::getNullValue(ArgType);
922 Value *ResZero = llvm::Constant::getNullValue(ResultType);
923 Value *ResOne = llvm::ConstantInt::get(ResultType, 1);
924
925 BasicBlock *Begin = Builder.GetInsertBlock();
926 BasicBlock *End = createBasicBlock("bitscan_end", this->CurFn);
927 Builder.SetInsertPoint(End);
928 PHINode *Result = Builder.CreatePHI(ResultType, 2, "bitscan_result");
929
930 Builder.SetInsertPoint(Begin);
931 Value *IsZero = Builder.CreateICmpEQ(ArgValue, ArgZero);
932 BasicBlock *NotZero = createBasicBlock("bitscan_not_zero", this->CurFn);
933 Builder.CreateCondBr(IsZero, End, NotZero);
934 Result->addIncoming(ResZero, Begin);
935
936 Builder.SetInsertPoint(NotZero);
937 Address IndexAddress = EmitPointerWithAlignment(E->getArg(0));
938
939 if (BuiltinID == MSVCIntrin::_BitScanForward) {
James Y Knight8799cae2019-02-03 21:53:49 +0000940 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
Albert Gutowski5e08df02016-10-13 22:35:07 +0000941 Value *ZeroCount = Builder.CreateCall(F, {ArgValue, Builder.getTrue()});
942 ZeroCount = Builder.CreateIntCast(ZeroCount, IndexType, false);
943 Builder.CreateStore(ZeroCount, IndexAddress, false);
944 } else {
945 unsigned ArgWidth = cast<llvm::IntegerType>(ArgType)->getBitWidth();
946 Value *ArgTypeLastIndex = llvm::ConstantInt::get(IndexType, ArgWidth - 1);
947
James Y Knight8799cae2019-02-03 21:53:49 +0000948 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
Albert Gutowski5e08df02016-10-13 22:35:07 +0000949 Value *ZeroCount = Builder.CreateCall(F, {ArgValue, Builder.getTrue()});
950 ZeroCount = Builder.CreateIntCast(ZeroCount, IndexType, false);
951 Value *Index = Builder.CreateNSWSub(ArgTypeLastIndex, ZeroCount);
952 Builder.CreateStore(Index, IndexAddress, false);
953 }
954 Builder.CreateBr(End);
955 Result->addIncoming(ResOne, NotZero);
956
957 Builder.SetInsertPoint(End);
958 return Result;
959 }
960 case MSVCIntrin::_InterlockedAnd:
961 return MakeBinaryAtomicValue(*this, AtomicRMWInst::And, E);
962 case MSVCIntrin::_InterlockedExchange:
963 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xchg, E);
964 case MSVCIntrin::_InterlockedExchangeAdd:
965 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Add, E);
966 case MSVCIntrin::_InterlockedExchangeSub:
967 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Sub, E);
968 case MSVCIntrin::_InterlockedOr:
969 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Or, E);
970 case MSVCIntrin::_InterlockedXor:
971 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xor, E);
Eli Friedmanb262d162018-10-31 21:31:09 +0000972 case MSVCIntrin::_InterlockedExchangeAdd_acq:
973 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Add, E,
974 AtomicOrdering::Acquire);
975 case MSVCIntrin::_InterlockedExchangeAdd_rel:
976 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Add, E,
977 AtomicOrdering::Release);
978 case MSVCIntrin::_InterlockedExchangeAdd_nf:
979 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Add, E,
980 AtomicOrdering::Monotonic);
Mandeep Singh Grang7fa07e52018-11-02 21:18:23 +0000981 case MSVCIntrin::_InterlockedExchange_acq:
982 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xchg, E,
983 AtomicOrdering::Acquire);
984 case MSVCIntrin::_InterlockedExchange_rel:
985 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xchg, E,
986 AtomicOrdering::Release);
987 case MSVCIntrin::_InterlockedExchange_nf:
988 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xchg, E,
989 AtomicOrdering::Monotonic);
Mandeep Singh Grang6b880682018-11-06 00:36:48 +0000990 case MSVCIntrin::_InterlockedCompareExchange_acq:
991 return EmitAtomicCmpXchgForMSIntrin(*this, E, AtomicOrdering::Acquire);
992 case MSVCIntrin::_InterlockedCompareExchange_rel:
993 return EmitAtomicCmpXchgForMSIntrin(*this, E, AtomicOrdering::Release);
994 case MSVCIntrin::_InterlockedCompareExchange_nf:
995 return EmitAtomicCmpXchgForMSIntrin(*this, E, AtomicOrdering::Monotonic);
Mandeep Singh Grangec62b312018-11-06 01:11:25 +0000996 case MSVCIntrin::_InterlockedOr_acq:
997 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Or, E,
998 AtomicOrdering::Acquire);
999 case MSVCIntrin::_InterlockedOr_rel:
1000 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Or, E,
1001 AtomicOrdering::Release);
1002 case MSVCIntrin::_InterlockedOr_nf:
1003 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Or, E,
1004 AtomicOrdering::Monotonic);
Mandeep Singh Grang806f1072018-11-06 04:55:20 +00001005 case MSVCIntrin::_InterlockedXor_acq:
1006 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xor, E,
1007 AtomicOrdering::Acquire);
1008 case MSVCIntrin::_InterlockedXor_rel:
1009 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xor, E,
1010 AtomicOrdering::Release);
1011 case MSVCIntrin::_InterlockedXor_nf:
1012 return MakeBinaryAtomicValue(*this, AtomicRMWInst::Xor, E,
1013 AtomicOrdering::Monotonic);
Mandeep Singh Grangc89157b2018-11-06 05:03:13 +00001014 case MSVCIntrin::_InterlockedAnd_acq:
1015 return MakeBinaryAtomicValue(*this, AtomicRMWInst::And, E,
1016 AtomicOrdering::Acquire);
1017 case MSVCIntrin::_InterlockedAnd_rel:
1018 return MakeBinaryAtomicValue(*this, AtomicRMWInst::And, E,
1019 AtomicOrdering::Release);
1020 case MSVCIntrin::_InterlockedAnd_nf:
1021 return MakeBinaryAtomicValue(*this, AtomicRMWInst::And, E,
1022 AtomicOrdering::Monotonic);
Mandeep Singh Grangfdf74d92018-11-06 05:05:32 +00001023 case MSVCIntrin::_InterlockedIncrement_acq:
1024 return EmitAtomicIncrementValue(*this, E, AtomicOrdering::Acquire);
1025 case MSVCIntrin::_InterlockedIncrement_rel:
1026 return EmitAtomicIncrementValue(*this, E, AtomicOrdering::Release);
1027 case MSVCIntrin::_InterlockedIncrement_nf:
1028 return EmitAtomicIncrementValue(*this, E, AtomicOrdering::Monotonic);
Mandeep Singh Grang574cadd2018-11-06 05:07:43 +00001029 case MSVCIntrin::_InterlockedDecrement_acq:
1030 return EmitAtomicDecrementValue(*this, E, AtomicOrdering::Acquire);
1031 case MSVCIntrin::_InterlockedDecrement_rel:
1032 return EmitAtomicDecrementValue(*this, E, AtomicOrdering::Release);
1033 case MSVCIntrin::_InterlockedDecrement_nf:
1034 return EmitAtomicDecrementValue(*this, E, AtomicOrdering::Monotonic);
Albert Gutowski5e08df02016-10-13 22:35:07 +00001035
Mandeep Singh Grang574cadd2018-11-06 05:07:43 +00001036 case MSVCIntrin::_InterlockedDecrement:
1037 return EmitAtomicDecrementValue(*this, E);
Mandeep Singh Grangfdf74d92018-11-06 05:05:32 +00001038 case MSVCIntrin::_InterlockedIncrement:
1039 return EmitAtomicIncrementValue(*this, E);
Reid Kleckner04f9f912017-02-09 18:31:06 +00001040
1041 case MSVCIntrin::__fastfail: {
1042 // Request immediate process termination from the kernel. The instruction
1043 // sequences to do this are documented on MSDN:
1044 // https://msdn.microsoft.com/en-us/library/dn774154.aspx
1045 llvm::Triple::ArchType ISA = getTarget().getTriple().getArch();
1046 StringRef Asm, Constraints;
1047 switch (ISA) {
1048 default:
1049 ErrorUnsupported(E, "__fastfail call for this architecture");
1050 break;
1051 case llvm::Triple::x86:
1052 case llvm::Triple::x86_64:
1053 Asm = "int $$0x29";
1054 Constraints = "{cx}";
1055 break;
1056 case llvm::Triple::thumb:
1057 Asm = "udf #251";
1058 Constraints = "{r0}";
1059 break;
Tom Tandcb9e082019-02-06 20:08:26 +00001060 case llvm::Triple::aarch64:
1061 Asm = "brk #0xF003";
1062 Constraints = "{w0}";
Reid Kleckner04f9f912017-02-09 18:31:06 +00001063 }
1064 llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, {Int32Ty}, false);
1065 llvm::InlineAsm *IA =
1066 llvm::InlineAsm::get(FTy, Asm, Constraints, /*SideEffects=*/true);
Reid Klecknerde864822017-03-21 16:57:30 +00001067 llvm::AttributeList NoReturnAttr = llvm::AttributeList::get(
1068 getLLVMContext(), llvm::AttributeList::FunctionIndex,
1069 llvm::Attribute::NoReturn);
James Y Knight3933add2019-01-30 02:54:28 +00001070 llvm::CallInst *CI = Builder.CreateCall(IA, EmitScalarExpr(E->getArg(0)));
1071 CI->setAttributes(NoReturnAttr);
1072 return CI;
Reid Kleckner04f9f912017-02-09 18:31:06 +00001073 }
Albert Gutowski5e08df02016-10-13 22:35:07 +00001074 }
1075 llvm_unreachable("Incorrect MSVC intrinsic!");
1076}
1077
Mehdi Amini06d367c2016-10-24 20:39:34 +00001078namespace {
1079// ARC cleanup for __builtin_os_log_format
1080struct CallObjCArcUse final : EHScopeStack::Cleanup {
1081 CallObjCArcUse(llvm::Value *object) : object(object) {}
1082 llvm::Value *object;
1083
1084 void Emit(CodeGenFunction &CGF, Flags flags) override {
1085 CGF.EmitARCIntrinsicUse(object);
1086 }
1087};
1088}
1089
Vedant Kumar10c31022017-07-29 00:19:51 +00001090Value *CodeGenFunction::EmitCheckedArgForBuiltin(const Expr *E,
1091 BuiltinCheckKind Kind) {
Victor Leschuk198357b2017-07-29 08:18:38 +00001092 assert((Kind == BCK_CLZPassedZero || Kind == BCK_CTZPassedZero)
1093 && "Unsupported builtin check kind");
Vedant Kumar10c31022017-07-29 00:19:51 +00001094
1095 Value *ArgValue = EmitScalarExpr(E);
1096 if (!SanOpts.has(SanitizerKind::Builtin) || !getTarget().isCLZForZeroUndef())
1097 return ArgValue;
1098
1099 SanitizerScope SanScope(this);
1100 Value *Cond = Builder.CreateICmpNE(
1101 ArgValue, llvm::Constant::getNullValue(ArgValue->getType()));
1102 EmitCheck(std::make_pair(Cond, SanitizerKind::Builtin),
1103 SanitizerHandler::InvalidBuiltin,
1104 {EmitCheckSourceLocation(E->getExprLoc()),
1105 llvm::ConstantInt::get(Builder.getInt8Ty(), Kind)},
1106 None);
1107 return ArgValue;
1108}
1109
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001110/// Get the argument type for arguments to os_log_helper.
1111static CanQualType getOSLogArgType(ASTContext &C, int Size) {
1112 QualType UnsignedTy = C.getIntTypeForBitwidth(Size * 8, /*Signed=*/false);
1113 return C.getCanonicalType(UnsignedTy);
1114}
1115
1116llvm::Function *CodeGenFunction::generateBuiltinOSLogHelperFunction(
1117 const analyze_os_log::OSLogBufferLayout &Layout,
1118 CharUnits BufferAlignment) {
1119 ASTContext &Ctx = getContext();
1120
1121 llvm::SmallString<64> Name;
1122 {
1123 raw_svector_ostream OS(Name);
1124 OS << "__os_log_helper";
1125 OS << "_" << BufferAlignment.getQuantity();
1126 OS << "_" << int(Layout.getSummaryByte());
1127 OS << "_" << int(Layout.getNumArgsByte());
1128 for (const auto &Item : Layout.Items)
1129 OS << "_" << int(Item.getSizeByte()) << "_"
1130 << int(Item.getDescriptorByte());
1131 }
1132
1133 if (llvm::Function *F = CGM.getModule().getFunction(Name))
1134 return F;
1135
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001136 llvm::SmallVector<QualType, 4> ArgTys;
Dmitri Gribenko39192042019-05-23 09:22:43 +00001137 FunctionArgList Args;
1138 Args.push_back(ImplicitParamDecl::Create(
1139 Ctx, nullptr, SourceLocation(), &Ctx.Idents.get("buffer"), Ctx.VoidPtrTy,
1140 ImplicitParamDecl::Other));
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001141 ArgTys.emplace_back(Ctx.VoidPtrTy);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001142
1143 for (unsigned int I = 0, E = Layout.Items.size(); I < E; ++I) {
1144 char Size = Layout.Items[I].getSizeByte();
1145 if (!Size)
1146 continue;
1147
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001148 QualType ArgTy = getOSLogArgType(Ctx, Size);
Dmitri Gribenko39192042019-05-23 09:22:43 +00001149 Args.push_back(ImplicitParamDecl::Create(
Akira Hatanakaa4638122017-10-06 07:47:47 +00001150 Ctx, nullptr, SourceLocation(),
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001151 &Ctx.Idents.get(std::string("arg") + llvm::to_string(I)), ArgTy,
Dmitri Gribenko39192042019-05-23 09:22:43 +00001152 ImplicitParamDecl::Other));
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001153 ArgTys.emplace_back(ArgTy);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001154 }
1155
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001156 QualType ReturnTy = Ctx.VoidTy;
1157 QualType FunctionTy = Ctx.getFunctionType(ReturnTy, ArgTys, {});
1158
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001159 // The helper function has linkonce_odr linkage to enable the linker to merge
1160 // identical functions. To ensure the merging always happens, 'noinline' is
1161 // attached to the function when compiling with -Oz.
1162 const CGFunctionInfo &FI =
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001163 CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, Args);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001164 llvm::FunctionType *FuncTy = CGM.getTypes().GetFunctionType(FI);
1165 llvm::Function *Fn = llvm::Function::Create(
1166 FuncTy, llvm::GlobalValue::LinkOnceODRLinkage, Name, &CGM.getModule());
1167 Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
Erich Keanede6480a32018-11-13 15:48:08 +00001168 CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, Fn);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001169 CGM.SetLLVMFunctionAttributesForDefinition(nullptr, Fn);
Vedant Kumar37b0f9a2019-04-02 17:42:38 +00001170 Fn->setDoesNotThrow();
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001171
1172 // Attach 'noinline' at -Oz.
1173 if (CGM.getCodeGenOpts().OptimizeSize == 2)
1174 Fn->addFnAttr(llvm::Attribute::NoInline);
1175
1176 auto NL = ApplyDebugLocation::CreateEmpty(*this);
1177 IdentifierInfo *II = &Ctx.Idents.get(Name);
1178 FunctionDecl *FD = FunctionDecl::Create(
1179 Ctx, Ctx.getTranslationUnitDecl(), SourceLocation(), SourceLocation(), II,
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001180 FunctionTy, nullptr, SC_PrivateExtern, false, false);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001181
Jonas Devlieghere64a26302018-11-11 00:56:15 +00001182 StartFunction(FD, ReturnTy, Fn, FI, Args);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001183
1184 // Create a scope with an artificial location for the body of this function.
1185 auto AL = ApplyDebugLocation::CreateArtificial(*this);
1186
1187 CharUnits Offset;
Dmitri Gribenko39192042019-05-23 09:22:43 +00001188 Address BufAddr(Builder.CreateLoad(GetAddrOfLocalVar(Args[0]), "buf"),
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001189 BufferAlignment);
1190 Builder.CreateStore(Builder.getInt8(Layout.getSummaryByte()),
1191 Builder.CreateConstByteGEP(BufAddr, Offset++, "summary"));
1192 Builder.CreateStore(Builder.getInt8(Layout.getNumArgsByte()),
1193 Builder.CreateConstByteGEP(BufAddr, Offset++, "numArgs"));
1194
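// The buffer written below is laid out as
//   [summary][numArgs]([argDescriptor][argSize][argData...])*
// with each argument's bytes copied at Offset, which advances by the size.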
1195 unsigned I = 1;
1196 for (const auto &Item : Layout.Items) {
1197 Builder.CreateStore(
1198 Builder.getInt8(Item.getDescriptorByte()),
1199 Builder.CreateConstByteGEP(BufAddr, Offset++, "argDescriptor"));
1200 Builder.CreateStore(
1201 Builder.getInt8(Item.getSizeByte()),
1202 Builder.CreateConstByteGEP(BufAddr, Offset++, "argSize"));
1203
1204 CharUnits Size = Item.size();
1205 if (!Size.getQuantity())
1206 continue;
1207
Dmitri Gribenko39192042019-05-23 09:22:43 +00001208 Address Arg = GetAddrOfLocalVar(Args[I]);
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001209 Address Addr = Builder.CreateConstByteGEP(BufAddr, Offset, "argData");
1210 Addr = Builder.CreateBitCast(Addr, Arg.getPointer()->getType(),
1211 "argDataCast");
1212 Builder.CreateStore(Builder.CreateLoad(Arg), Addr);
1213 Offset += Size;
1214 ++I;
1215 }
1216
1217 FinishFunction();
1218
1219 return Fn;
1220}
1221
1222RValue CodeGenFunction::emitBuiltinOSLogFormat(const CallExpr &E) {
1223 assert(E.getNumArgs() >= 2 &&
1224 "__builtin_os_log_format takes at least 2 arguments");
1225 ASTContext &Ctx = getContext();
1226 analyze_os_log::OSLogBufferLayout Layout;
1227 analyze_os_log::computeOSLogBufferLayout(Ctx, &E, Layout);
1228 Address BufAddr = EmitPointerWithAlignment(E.getArg(0));
1229 llvm::SmallVector<llvm::Value *, 4> RetainableOperands;
1230
1231 // Ignore argument 1, the format string. It is not currently used.
1232 CallArgList Args;
1233 Args.add(RValue::get(BufAddr.getPointer()), Ctx.VoidPtrTy);
1234
1235 for (const auto &Item : Layout.Items) {
1236 int Size = Item.getSizeByte();
1237 if (!Size)
1238 continue;
1239
1240 llvm::Value *ArgVal;
1241
Akira Hatanakad572cf42018-11-06 07:05:14 +00001242 if (Item.getKind() == analyze_os_log::OSLogBufferItem::MaskKind) {
1243 uint64_t Val = 0;
1244 for (unsigned I = 0, E = Item.getMaskType().size(); I < E; ++I)
Akira Hatanaka908aabb2018-11-06 07:12:28 +00001245 Val |= ((uint64_t)Item.getMaskType()[I]) << I * 8;
Akira Hatanakad572cf42018-11-06 07:05:14 +00001246 ArgVal = llvm::Constant::getIntegerValue(Int64Ty, llvm::APInt(64, Val));
1247 } else if (const Expr *TheExpr = Item.getExpr()) {
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00001248 ArgVal = EmitScalarExpr(TheExpr, /*Ignore*/ false);
1249
1250 // Check if this is a retainable type.
1251 if (TheExpr->getType()->isObjCRetainableType()) {
1252 assert(getEvaluationKind(TheExpr->getType()) == TEK_Scalar &&
1253 "Only scalar can be a ObjC retainable type");
1254 // Check if the object is constant, if not, save it in
1255 // RetainableOperands.
1256 if (!isa<Constant>(ArgVal))
1257 RetainableOperands.push_back(ArgVal);
1258 }
1259 } else {
1260 ArgVal = Builder.getInt32(Item.getConstValue().getQuantity());
1261 }
1262
1263 unsigned ArgValSize =
1264 CGM.getDataLayout().getTypeSizeInBits(ArgVal->getType());
1265 llvm::IntegerType *IntTy = llvm::Type::getIntNTy(getLLVMContext(),
1266 ArgValSize);
1267 ArgVal = Builder.CreateBitOrPointerCast(ArgVal, IntTy);
1268 CanQualType ArgTy = getOSLogArgType(Ctx, Size);
1269 // If ArgVal has type x86_fp80, zero-extend ArgVal.
1270 ArgVal = Builder.CreateZExtOrBitCast(ArgVal, ConvertType(ArgTy));
1271 Args.add(RValue::get(ArgVal), ArgTy);
1272 }
1273
1274 const CGFunctionInfo &FI =
1275 CGM.getTypes().arrangeBuiltinFunctionCall(Ctx.VoidTy, Args);
1276 llvm::Function *F = CodeGenFunction(CGM).generateBuiltinOSLogHelperFunction(
1277 Layout, BufAddr.getAlignment());
1278 EmitCall(FI, CGCallee::forDirect(F), ReturnValueSlot(), Args);
1279
1280 // Push a clang.arc.use cleanup for each object in RetainableOperands. The
1281 // cleanup will cause the use to appear after the final log call, keeping
1282 // the object valid while it’s held in the log buffer. Note that if there’s
1283 // a release cleanup on the object, it will already be active; since
1284 // cleanups are emitted in reverse order, the use will occur before the
1285 // object is released.
1286 if (!RetainableOperands.empty() && getLangOpts().ObjCAutoRefCount &&
1287 CGM.getCodeGenOpts().OptimizationLevel != 0)
1288 for (llvm::Value *Object : RetainableOperands)
1289 pushFullExprCleanup<CallObjCArcUse>(getARCCleanupKind(), Object);
1290
1291 return RValue::get(BufAddr.getPointer());
1292}
1293
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001294/// Determine if a binop is a checked mixed-sign multiply we can specialize.
1295static bool isSpecialMixedSignMultiply(unsigned BuiltinID,
1296 WidthAndSignedness Op1Info,
1297 WidthAndSignedness Op2Info,
1298 WidthAndSignedness ResultInfo) {
1299 return BuiltinID == Builtin::BI__builtin_mul_overflow &&
Vedant Kumar77dfca82018-12-18 21:05:03 +00001300 std::max(Op1Info.Width, Op2Info.Width) >= ResultInfo.Width &&
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001301 Op1Info.Signed != Op2Info.Signed;
1302}
1303
1304/// Emit a checked mixed-sign multiply. This is a cheaper specialization of
1305/// the generic checked-binop irgen.
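/// For example (illustrative), for "int r; __builtin_mul_overflow(-3, 5u, &r)"
/// the signed operand is negated to 3, a single umul.with.overflow computes
/// 3 * 5, and the product is negated back to -15; the reported overflow is the
/// unsigned overflow bit OR'd with a check that the magnitude fits the signed
/// result range.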
1306static RValue
1307EmitCheckedMixedSignMultiply(CodeGenFunction &CGF, const clang::Expr *Op1,
1308 WidthAndSignedness Op1Info, const clang::Expr *Op2,
1309 WidthAndSignedness Op2Info,
1310 const clang::Expr *ResultArg, QualType ResultQTy,
1311 WidthAndSignedness ResultInfo) {
1312 assert(isSpecialMixedSignMultiply(Builtin::BI__builtin_mul_overflow, Op1Info,
1313 Op2Info, ResultInfo) &&
1314 "Not a mixed-sign multipliction we can specialize");
1315
1316 // Emit the signed and unsigned operands.
1317 const clang::Expr *SignedOp = Op1Info.Signed ? Op1 : Op2;
1318 const clang::Expr *UnsignedOp = Op1Info.Signed ? Op2 : Op1;
1319 llvm::Value *Signed = CGF.EmitScalarExpr(SignedOp);
1320 llvm::Value *Unsigned = CGF.EmitScalarExpr(UnsignedOp);
Vedant Kumar77dfca82018-12-18 21:05:03 +00001321 unsigned SignedOpWidth = Op1Info.Signed ? Op1Info.Width : Op2Info.Width;
1322 unsigned UnsignedOpWidth = Op1Info.Signed ? Op2Info.Width : Op1Info.Width;
1323
1324 // One of the operands may be smaller than the other. If so, [s|z]ext it.
1325 if (SignedOpWidth < UnsignedOpWidth)
1326 Signed = CGF.Builder.CreateSExt(Signed, Unsigned->getType(), "op.sext");
1327 if (UnsignedOpWidth < SignedOpWidth)
1328 Unsigned = CGF.Builder.CreateZExt(Unsigned, Signed->getType(), "op.zext");
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001329
1330 llvm::Type *OpTy = Signed->getType();
1331 llvm::Value *Zero = llvm::Constant::getNullValue(OpTy);
1332 Address ResultPtr = CGF.EmitPointerWithAlignment(ResultArg);
1333 llvm::Type *ResTy = ResultPtr.getElementType();
Vedant Kumar77dfca82018-12-18 21:05:03 +00001334 unsigned OpWidth = std::max(Op1Info.Width, Op2Info.Width);
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001335
1336 // Take the absolute value of the signed operand.
1337 llvm::Value *IsNegative = CGF.Builder.CreateICmpSLT(Signed, Zero);
1338 llvm::Value *AbsOfNegative = CGF.Builder.CreateSub(Zero, Signed);
1339 llvm::Value *AbsSigned =
1340 CGF.Builder.CreateSelect(IsNegative, AbsOfNegative, Signed);
1341
1342 // Perform a checked unsigned multiplication.
1343 llvm::Value *UnsignedOverflow;
1344 llvm::Value *UnsignedResult =
1345 EmitOverflowIntrinsic(CGF, llvm::Intrinsic::umul_with_overflow, AbsSigned,
1346 Unsigned, UnsignedOverflow);
1347
1348 llvm::Value *Overflow, *Result;
1349 if (ResultInfo.Signed) {
1350 // Signed overflow occurs if the result is greater than INT_MAX or less
1351 // than INT_MIN, i.e. when |Result| > (INT_MAX + IsNegative).
Vedant Kumar77dfca82018-12-18 21:05:03 +00001352 auto IntMax =
1353 llvm::APInt::getSignedMaxValue(ResultInfo.Width).zextOrSelf(OpWidth);
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001354 llvm::Value *MaxResult =
1355 CGF.Builder.CreateAdd(llvm::ConstantInt::get(OpTy, IntMax),
1356 CGF.Builder.CreateZExt(IsNegative, OpTy));
1357 llvm::Value *SignedOverflow =
1358 CGF.Builder.CreateICmpUGT(UnsignedResult, MaxResult);
1359 Overflow = CGF.Builder.CreateOr(UnsignedOverflow, SignedOverflow);
1360
1361 // Prepare the signed result (possibly by negating it).
1362 llvm::Value *NegativeResult = CGF.Builder.CreateNeg(UnsignedResult);
1363 llvm::Value *SignedResult =
1364 CGF.Builder.CreateSelect(IsNegative, NegativeResult, UnsignedResult);
1365 Result = CGF.Builder.CreateTrunc(SignedResult, ResTy);
1366 } else {
1367 // Unsigned overflow occurs if the result is < 0 or greater than UINT_MAX.
1368 llvm::Value *Underflow = CGF.Builder.CreateAnd(
1369 IsNegative, CGF.Builder.CreateIsNotNull(UnsignedResult));
1370 Overflow = CGF.Builder.CreateOr(UnsignedOverflow, Underflow);
Vedant Kumar77dfca82018-12-18 21:05:03 +00001371 if (ResultInfo.Width < OpWidth) {
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001372 auto IntMax =
Vedant Kumar77dfca82018-12-18 21:05:03 +00001373 llvm::APInt::getMaxValue(ResultInfo.Width).zext(OpWidth);
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001374 llvm::Value *TruncOverflow = CGF.Builder.CreateICmpUGT(
1375 UnsignedResult, llvm::ConstantInt::get(OpTy, IntMax));
1376 Overflow = CGF.Builder.CreateOr(Overflow, TruncOverflow);
1377 }
1378
Vedant Kumarbbafd502018-01-03 23:11:32 +00001379 // Negate the product if it would be negative in infinite precision.
1380 Result = CGF.Builder.CreateSelect(
1381 IsNegative, CGF.Builder.CreateNeg(UnsignedResult), UnsignedResult);
1382
1383 Result = CGF.Builder.CreateTrunc(Result, ResTy);
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00001384 }
1385 assert(Overflow && Result && "Missing overflow or result");
1386
1387 bool isVolatile =
1388 ResultArg->getType()->getPointeeType().isVolatileQualified();
1389 CGF.Builder.CreateStore(CGF.EmitToMemory(Result, ResultQTy), ResultPtr,
1390 isVolatile);
1391 return RValue::get(Overflow);
1392}
1393
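// Helper for __builtin_dump_struct: emits calls to the given printf-like
// callee that print "<record type> {", then one "<type> <name> : <value>" line
// per field (recursing into nested record fields), then a closing "}", and
// returns the accumulated sum of the call results.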
Aaron Ballman06525342018-04-10 21:58:13 +00001394static llvm::Value *dumpRecord(CodeGenFunction &CGF, QualType RType,
James Y Knight76f78742019-02-05 19:17:50 +00001395 Value *&RecordPtr, CharUnits Align,
1396 llvm::FunctionCallee Func, int Lvl) {
Aaron Ballman06525342018-04-10 21:58:13 +00001397 const auto *RT = RType->getAs<RecordType>();
1398 ASTContext &Context = CGF.getContext();
1399 RecordDecl *RD = RT->getDecl()->getDefinition();
Aaron Ballman06525342018-04-10 21:58:13 +00001400 std::string Pad = std::string(Lvl * 4, ' ');
1401
1402 Value *GString =
1403 CGF.Builder.CreateGlobalStringPtr(RType.getAsString() + " {\n");
1404 Value *Res = CGF.Builder.CreateCall(Func, {GString});
1405
1406 static llvm::DenseMap<QualType, const char *> Types;
1407 if (Types.empty()) {
1408 Types[Context.CharTy] = "%c";
1409 Types[Context.BoolTy] = "%d";
Aaron Ballmanfe935462018-04-17 14:00:06 +00001410 Types[Context.SignedCharTy] = "%hhd";
1411 Types[Context.UnsignedCharTy] = "%hhu";
Aaron Ballman06525342018-04-10 21:58:13 +00001412 Types[Context.IntTy] = "%d";
1413 Types[Context.UnsignedIntTy] = "%u";
1414 Types[Context.LongTy] = "%ld";
1415 Types[Context.UnsignedLongTy] = "%lu";
1416 Types[Context.LongLongTy] = "%lld";
1417 Types[Context.UnsignedLongLongTy] = "%llu";
1418 Types[Context.ShortTy] = "%hd";
1419 Types[Context.UnsignedShortTy] = "%hu";
1420 Types[Context.VoidPtrTy] = "%p";
1421 Types[Context.FloatTy] = "%f";
1422 Types[Context.DoubleTy] = "%f";
1423 Types[Context.LongDoubleTy] = "%Lf";
1424 Types[Context.getPointerType(Context.CharTy)] = "%s";
Aaron Ballmanb6a77022018-04-17 11:57:47 +00001425 Types[Context.getPointerType(Context.getConstType(Context.CharTy))] = "%s";
Aaron Ballman06525342018-04-10 21:58:13 +00001426 }
1427
1428 for (const auto *FD : RD->fields()) {
Aaron Ballman06525342018-04-10 21:58:13 +00001429 Value *FieldPtr = RecordPtr;
1430 if (RD->isUnion())
1431 FieldPtr = CGF.Builder.CreatePointerCast(
1432 FieldPtr, CGF.ConvertType(Context.getPointerType(FD->getType())));
1433 else
1434 FieldPtr = CGF.Builder.CreateStructGEP(CGF.ConvertType(RType), FieldPtr,
1435 FD->getFieldIndex());
1436
1437 GString = CGF.Builder.CreateGlobalStringPtr(
1438 llvm::Twine(Pad)
1439 .concat(FD->getType().getAsString())
1440 .concat(llvm::Twine(' '))
1441 .concat(FD->getNameAsString())
1442 .concat(" : ")
1443 .str());
1444 Value *TmpRes = CGF.Builder.CreateCall(Func, {GString});
1445 Res = CGF.Builder.CreateAdd(Res, TmpRes);
1446
1447 QualType CanonicalType =
1448 FD->getType().getUnqualifiedType().getCanonicalType();
1449
1450 // If the field is itself a record type, dump it recursively.
1451 if (CanonicalType->isRecordType()) {
1452 Value *TmpRes =
1453 dumpRecord(CGF, CanonicalType, FieldPtr, Align, Func, Lvl + 1);
1454 Res = CGF.Builder.CreateAdd(TmpRes, Res);
1455 continue;
1456 }
1457
1458 // We try to determine the best format to print the current field
1459 llvm::Twine Format = Types.find(CanonicalType) == Types.end()
1460 ? Types[Context.VoidPtrTy]
1461 : Types[CanonicalType];
1462
1463 Address FieldAddress = Address(FieldPtr, Align);
1464 FieldPtr = CGF.Builder.CreateLoad(FieldAddress);
1465
1466 // FIXME: Need to handle bitfields here.
1467 GString = CGF.Builder.CreateGlobalStringPtr(
1468 Format.concat(llvm::Twine('\n')).str());
1469 TmpRes = CGF.Builder.CreateCall(Func, {GString, FieldPtr});
1470 Res = CGF.Builder.CreateAdd(Res, TmpRes);
1471 }
1472
1473 GString = CGF.Builder.CreateGlobalStringPtr(Pad + "}\n");
1474 Value *TmpRes = CGF.Builder.CreateCall(Func, {GString});
1475 Res = CGF.Builder.CreateAdd(Res, TmpRes);
1476 return Res;
1477}
1478
Eric Fiselier26187502018-12-14 21:11:28 +00001479static bool
1480TypeRequiresBuiltinLaunderImp(const ASTContext &Ctx, QualType Ty,
1481 llvm::SmallPtrSetImpl<const Decl *> &Seen) {
1482 if (const auto *Arr = Ctx.getAsArrayType(Ty))
1483 Ty = Ctx.getBaseElementType(Arr);
1484
1485 const auto *Record = Ty->getAsCXXRecordDecl();
1486 if (!Record)
1487 return false;
1488
1489 // We've already checked this type, or are in the process of checking it.
1490 if (!Seen.insert(Record).second)
1491 return false;
1492
1493 assert(Record->hasDefinition() &&
1494 "Incomplete types should already be diagnosed");
1495
1496 if (Record->isDynamicClass())
1497 return true;
1498
1499 for (FieldDecl *F : Record->fields()) {
1500 if (TypeRequiresBuiltinLaunderImp(Ctx, F->getType(), Seen))
1501 return true;
1502 }
1503 return false;
1504}
1505
1506/// Determine if the specified type requires laundering by checking if it is a
1507/// dynamic class type or contains a subobject which is a dynamic class type.
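/// For example, a class with virtual functions or virtual bases, or a record
/// holding such a class as a field or array element, requires laundering;
/// without -fstrict-vtable-pointers this always returns false.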
1508static bool TypeRequiresBuiltinLaunder(CodeGenModule &CGM, QualType Ty) {
1509 if (!CGM.getCodeGenOpts().StrictVTablePointers)
1510 return false;
1511 llvm::SmallPtrSet<const Decl *, 16> Seen;
1512 return TypeRequiresBuiltinLaunderImp(CGM.getContext(), Ty, Seen);
1513}
1514
Sanjay Patelad823902018-08-19 16:50:30 +00001515RValue CodeGenFunction::emitRotate(const CallExpr *E, bool IsRotateRight) {
1516 llvm::Value *Src = EmitScalarExpr(E->getArg(0));
1517 llvm::Value *ShiftAmt = EmitScalarExpr(E->getArg(1));
1518
1519 // The builtin's shift arg may have a different type than the source arg and
1520 // result, but the LLVM intrinsic uses the same type for all values.
1521 llvm::Type *Ty = Src->getType();
1522 ShiftAmt = Builder.CreateIntCast(ShiftAmt, Ty, false);
1523
1524 // Rotate is a special case of LLVM funnel shift - 1st 2 args are the same.
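// e.g. __builtin_rotateleft32(x, n) becomes @llvm.fshl.i32(x, x, n) and
// __builtin_rotateright32(x, n) becomes @llvm.fshr.i32(x, x, n).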
1525 unsigned IID = IsRotateRight ? Intrinsic::fshr : Intrinsic::fshl;
James Y Knight8799cae2019-02-03 21:53:49 +00001526 Function *F = CGM.getIntrinsic(IID, Ty);
Sanjay Patelad823902018-08-19 16:50:30 +00001527 return RValue::get(Builder.CreateCall(F, { Src, Src, ShiftAmt }));
1528}
1529
Erich Keanede6480a32018-11-13 15:48:08 +00001530RValue CodeGenFunction::EmitBuiltinExpr(const GlobalDecl GD, unsigned BuiltinID,
1531 const CallExpr *E,
Peter Collingbournef7706832014-12-12 23:41:25 +00001532 ReturnValueSlot ReturnValue) {
Erich Keanede6480a32018-11-13 15:48:08 +00001533 const FunctionDecl *FD = GD.getDecl()->getAsFunction();
Chris Lattner24355b52008-10-06 06:56:41 +00001534 // See if we can constant fold this builtin. If so, don't emit it at all.
Anders Carlssonc9687902008-12-01 02:31:41 +00001535 Expr::EvalResult Result;
Eli Friedmandf88c542012-01-06 20:03:09 +00001536 if (E->EvaluateAsRValue(Result, CGM.getContext()) &&
Fariborz Jahanian24ac1592011-04-25 23:10:07 +00001537 !Result.hasSideEffects()) {
Anders Carlssonc9687902008-12-01 02:31:41 +00001538 if (Result.Val.isInt())
John McCallad7c5c12011-02-08 08:22:06 +00001539 return RValue::get(llvm::ConstantInt::get(getLLVMContext(),
Owen Andersonb7a2fe62009-07-24 23:12:58 +00001540 Result.Val.getInt()));
Chris Lattner07e96862010-10-01 23:43:16 +00001541 if (Result.Val.isFloat())
John McCallad7c5c12011-02-08 08:22:06 +00001542 return RValue::get(llvm::ConstantFP::get(getLLVMContext(),
1543 Result.Val.getFloat()));
Chris Lattnera1518b12008-10-06 06:09:18 +00001544 }
Mike Stump11289f42009-09-09 15:08:12 +00001545
Sanjay Patel08fba372017-12-02 17:52:00 +00001546 // There are LLVM math intrinsics/instructions corresponding to math library
1547 // functions, except that the LLVM op never sets errno while the math library
1548 // might. Also, math builtins have the same semantics as their math library
1549 // twins. Thus, we can transform math library and builtin calls to their
1550 // LLVM counterparts if the call is marked 'const' (known to never set errno).
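// For example, a 'const' call to sqrt(x) is emitted as @llvm.sqrt below, while
// fmod() is the one special case that maps to the 'frem' instruction instead
// of an intrinsic.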
Sanjay Patel3e287b42017-12-01 23:15:52 +00001551 if (FD->hasAttr<ConstAttr>()) {
1552 switch (BuiltinID) {
1553 case Builtin::BIceil:
1554 case Builtin::BIceilf:
1555 case Builtin::BIceill:
1556 case Builtin::BI__builtin_ceil:
1557 case Builtin::BI__builtin_ceilf:
1558 case Builtin::BI__builtin_ceill:
1559 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::ceil));
1560
1561 case Builtin::BIcopysign:
1562 case Builtin::BIcopysignf:
1563 case Builtin::BIcopysignl:
1564 case Builtin::BI__builtin_copysign:
1565 case Builtin::BI__builtin_copysignf:
1566 case Builtin::BI__builtin_copysignl:
Benjamin Kramerdfecbe92018-01-06 21:49:54 +00001567 case Builtin::BI__builtin_copysignf128:
Sanjay Patel3e287b42017-12-01 23:15:52 +00001568 return RValue::get(emitBinaryBuiltin(*this, E, Intrinsic::copysign));
1569
1570 case Builtin::BIcos:
1571 case Builtin::BIcosf:
1572 case Builtin::BIcosl:
1573 case Builtin::BI__builtin_cos:
1574 case Builtin::BI__builtin_cosf:
1575 case Builtin::BI__builtin_cosl:
1576 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::cos));
1577
1578 case Builtin::BIexp:
1579 case Builtin::BIexpf:
1580 case Builtin::BIexpl:
1581 case Builtin::BI__builtin_exp:
1582 case Builtin::BI__builtin_expf:
1583 case Builtin::BI__builtin_expl:
1584 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::exp));
1585
1586 case Builtin::BIexp2:
1587 case Builtin::BIexp2f:
1588 case Builtin::BIexp2l:
1589 case Builtin::BI__builtin_exp2:
1590 case Builtin::BI__builtin_exp2f:
1591 case Builtin::BI__builtin_exp2l:
1592 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::exp2));
1593
1594 case Builtin::BIfabs:
1595 case Builtin::BIfabsf:
1596 case Builtin::BIfabsl:
1597 case Builtin::BI__builtin_fabs:
1598 case Builtin::BI__builtin_fabsf:
1599 case Builtin::BI__builtin_fabsl:
Benjamin Kramerdfecbe92018-01-06 21:49:54 +00001600 case Builtin::BI__builtin_fabsf128:
Sanjay Patel3e287b42017-12-01 23:15:52 +00001601 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::fabs));
1602
1603 case Builtin::BIfloor:
1604 case Builtin::BIfloorf:
1605 case Builtin::BIfloorl:
1606 case Builtin::BI__builtin_floor:
1607 case Builtin::BI__builtin_floorf:
1608 case Builtin::BI__builtin_floorl:
1609 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::floor));
1610
1611 case Builtin::BIfma:
1612 case Builtin::BIfmaf:
1613 case Builtin::BIfmal:
1614 case Builtin::BI__builtin_fma:
1615 case Builtin::BI__builtin_fmaf:
1616 case Builtin::BI__builtin_fmal:
1617 return RValue::get(emitTernaryBuiltin(*this, E, Intrinsic::fma));
1618
1619 case Builtin::BIfmax:
1620 case Builtin::BIfmaxf:
1621 case Builtin::BIfmaxl:
1622 case Builtin::BI__builtin_fmax:
1623 case Builtin::BI__builtin_fmaxf:
1624 case Builtin::BI__builtin_fmaxl:
1625 return RValue::get(emitBinaryBuiltin(*this, E, Intrinsic::maxnum));
1626
1627 case Builtin::BIfmin:
1628 case Builtin::BIfminf:
1629 case Builtin::BIfminl:
1630 case Builtin::BI__builtin_fmin:
1631 case Builtin::BI__builtin_fminf:
1632 case Builtin::BI__builtin_fminl:
1633 return RValue::get(emitBinaryBuiltin(*this, E, Intrinsic::minnum));
1634
Sanjay Patel08fba372017-12-02 17:52:00 +00001635 // fmod() is a special-case. It maps to the frem instruction rather than an
1636 // LLVM intrinsic.
1637 case Builtin::BIfmod:
1638 case Builtin::BIfmodf:
1639 case Builtin::BIfmodl:
1640 case Builtin::BI__builtin_fmod:
1641 case Builtin::BI__builtin_fmodf:
1642 case Builtin::BI__builtin_fmodl: {
1643 Value *Arg1 = EmitScalarExpr(E->getArg(0));
1644 Value *Arg2 = EmitScalarExpr(E->getArg(1));
1645 return RValue::get(Builder.CreateFRem(Arg1, Arg2, "fmod"));
1646 }
1647
Sanjay Patel3e287b42017-12-01 23:15:52 +00001648 case Builtin::BIlog:
1649 case Builtin::BIlogf:
1650 case Builtin::BIlogl:
1651 case Builtin::BI__builtin_log:
1652 case Builtin::BI__builtin_logf:
1653 case Builtin::BI__builtin_logl:
1654 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::log));
1655
1656 case Builtin::BIlog10:
1657 case Builtin::BIlog10f:
1658 case Builtin::BIlog10l:
1659 case Builtin::BI__builtin_log10:
1660 case Builtin::BI__builtin_log10f:
1661 case Builtin::BI__builtin_log10l:
1662 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::log10));
1663
1664 case Builtin::BIlog2:
1665 case Builtin::BIlog2f:
1666 case Builtin::BIlog2l:
1667 case Builtin::BI__builtin_log2:
1668 case Builtin::BI__builtin_log2f:
1669 case Builtin::BI__builtin_log2l:
1670 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::log2));
1671
1672 case Builtin::BInearbyint:
1673 case Builtin::BInearbyintf:
1674 case Builtin::BInearbyintl:
1675 case Builtin::BI__builtin_nearbyint:
1676 case Builtin::BI__builtin_nearbyintf:
1677 case Builtin::BI__builtin_nearbyintl:
1678 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::nearbyint));
1679
1680 case Builtin::BIpow:
1681 case Builtin::BIpowf:
1682 case Builtin::BIpowl:
1683 case Builtin::BI__builtin_pow:
1684 case Builtin::BI__builtin_powf:
1685 case Builtin::BI__builtin_powl:
1686 return RValue::get(emitBinaryBuiltin(*this, E, Intrinsic::pow));
1687
1688 case Builtin::BIrint:
1689 case Builtin::BIrintf:
1690 case Builtin::BIrintl:
1691 case Builtin::BI__builtin_rint:
1692 case Builtin::BI__builtin_rintf:
1693 case Builtin::BI__builtin_rintl:
1694 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::rint));
1695
1696 case Builtin::BIround:
1697 case Builtin::BIroundf:
1698 case Builtin::BIroundl:
1699 case Builtin::BI__builtin_round:
1700 case Builtin::BI__builtin_roundf:
1701 case Builtin::BI__builtin_roundl:
1702 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::round));
1703
1704 case Builtin::BIsin:
1705 case Builtin::BIsinf:
1706 case Builtin::BIsinl:
1707 case Builtin::BI__builtin_sin:
1708 case Builtin::BI__builtin_sinf:
1709 case Builtin::BI__builtin_sinl:
1710 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::sin));
1711
1712 case Builtin::BIsqrt:
1713 case Builtin::BIsqrtf:
1714 case Builtin::BIsqrtl:
1715 case Builtin::BI__builtin_sqrt:
1716 case Builtin::BI__builtin_sqrtf:
1717 case Builtin::BI__builtin_sqrtl:
1718 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::sqrt));
1719
1720 case Builtin::BItrunc:
1721 case Builtin::BItruncf:
1722 case Builtin::BItruncl:
1723 case Builtin::BI__builtin_trunc:
1724 case Builtin::BI__builtin_truncf:
1725 case Builtin::BI__builtin_truncl:
1726 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::trunc));
1727
Adhemerval Zanella0d9dcd72019-05-16 13:43:25 +00001728 case Builtin::BIlround:
1729 case Builtin::BIlroundf:
1730 case Builtin::BIlroundl:
1731 case Builtin::BI__builtin_lround:
1732 case Builtin::BI__builtin_lroundf:
Craig Topperaf7a1882019-05-20 16:27:09 +00001733 case Builtin::BI__builtin_lroundl:
1734 return RValue::get(emitFPToIntRoundBuiltin(*this, E, Intrinsic::lround));
Adhemerval Zanella0d9dcd72019-05-16 13:43:25 +00001735
1736 case Builtin::BIllround:
1737 case Builtin::BIllroundf:
1738 case Builtin::BIllroundl:
1739 case Builtin::BI__builtin_llround:
1740 case Builtin::BI__builtin_llroundf:
1741 case Builtin::BI__builtin_llroundl:
Craig Topperaf7a1882019-05-20 16:27:09 +00001742 return RValue::get(emitFPToIntRoundBuiltin(*this, E, Intrinsic::llround));
Adhemerval Zanella0d9dcd72019-05-16 13:43:25 +00001743
Sanjay Patel3e287b42017-12-01 23:15:52 +00001744 default:
1745 break;
1746 }
1747 }
1748
Chris Lattner24355b52008-10-06 06:56:41 +00001749 switch (BuiltinID) {
Sanjay Patel0c0f77d2017-12-02 16:29:34 +00001750 default: break;
Chris Lattnera97132a2008-10-06 07:26:43 +00001751 case Builtin::BI__builtin___CFStringMakeConstantString:
David Chisnall481e3a82010-01-23 02:40:42 +00001752 case Builtin::BI__builtin___NSStringMakeConstantString:
John McCallde0fe072017-08-15 21:42:52 +00001753 return RValue::get(ConstantEmitter(*this).emitAbstract(E, E->getType()));
Chris Lattner0bf67912008-07-09 17:28:44 +00001754 case Builtin::BI__builtin_stdarg_start:
Anders Carlsson24ebce62007-10-12 23:56:29 +00001755 case Builtin::BI__builtin_va_start:
Reid Kleckner597e81d2014-03-26 15:38:33 +00001756 case Builtin::BI__va_start:
Charles Davisc7d5c942015-09-17 20:55:33 +00001757 case Builtin::BI__builtin_va_end:
1758 return RValue::get(
1759 EmitVAStartEnd(BuiltinID == Builtin::BI__va_start
1760 ? EmitScalarExpr(E->getArg(0))
1761 : EmitVAListRef(E->getArg(0)).getPointer(),
1762 BuiltinID != Builtin::BI__builtin_va_end));
Anders Carlssonc0b0e592008-02-09 20:26:43 +00001763 case Builtin::BI__builtin_va_copy: {
John McCall7f416cc2015-09-08 08:05:57 +00001764 Value *DstPtr = EmitVAListRef(E->getArg(0)).getPointer();
1765 Value *SrcPtr = EmitVAListRef(E->getArg(1)).getPointer();
Anders Carlssonc0b0e592008-02-09 20:26:43 +00001766
Chris Lattner2192fe52011-07-18 04:24:23 +00001767 llvm::Type *Type = Int8PtrTy;
Anders Carlssonc0b0e592008-02-09 20:26:43 +00001768
1769 DstPtr = Builder.CreateBitCast(DstPtr, Type);
1770 SrcPtr = Builder.CreateBitCast(SrcPtr, Type);
David Blaikie43f9bb72015-05-18 22:14:03 +00001771 return RValue::get(Builder.CreateCall(CGM.getIntrinsic(Intrinsic::vacopy),
1772 {DstPtr, SrcPtr}));
Anders Carlssonc0b0e592008-02-09 20:26:43 +00001773 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00001774 case Builtin::BI__builtin_abs:
Eli Friedman65499b42012-01-17 22:11:30 +00001775 case Builtin::BI__builtin_labs:
1776 case Builtin::BI__builtin_llabs: {
Sanjay Patel1ff6b272018-05-22 15:36:50 +00001777 // X < 0 ? -X : X
Sanjay Patel74c7fb02018-05-22 23:02:13 +00001778 // The negation has 'nsw' because abs of INT_MIN is undefined.
Mike Stump11289f42009-09-09 15:08:12 +00001779 Value *ArgValue = EmitScalarExpr(E->getArg(0));
Sanjay Patel74c7fb02018-05-22 23:02:13 +00001780 Value *NegOp = Builder.CreateNSWNeg(ArgValue, "neg");
Sanjay Patel1ff6b272018-05-22 15:36:50 +00001781 Constant *Zero = llvm::Constant::getNullValue(ArgValue->getType());
1782 Value *CmpResult = Builder.CreateICmpSLT(ArgValue, Zero, "abscond");
1783 Value *Result = Builder.CreateSelect(CmpResult, NegOp, ArgValue, "abs");
Anders Carlsson4f8eb122007-11-20 19:05:17 +00001784 return RValue::get(Result);
1785 }
Fariborz Jahanian1ac11192012-08-14 20:09:28 +00001786 case Builtin::BI__builtin_conj:
1787 case Builtin::BI__builtin_conjf:
1788 case Builtin::BI__builtin_conjl: {
1789 ComplexPairTy ComplexVal = EmitComplexExpr(E->getArg(0));
1790 Value *Real = ComplexVal.first;
1791 Value *Imag = ComplexVal.second;
Jim Grosbachd3608f42012-09-21 00:18:27 +00001792 Value *Zero =
1793 Imag->getType()->isFPOrFPVectorTy()
Fariborz Jahanian1ac11192012-08-14 20:09:28 +00001794 ? llvm::ConstantFP::getZeroValueForNegation(Imag->getType())
1795 : llvm::Constant::getNullValue(Imag->getType());
Jim Grosbachd3608f42012-09-21 00:18:27 +00001796
Fariborz Jahanian1ac11192012-08-14 20:09:28 +00001797 Imag = Builder.CreateFSub(Zero, Imag, "sub");
1798 return RValue::getComplex(std::make_pair(Real, Imag));
1799 }
1800 case Builtin::BI__builtin_creal:
1801 case Builtin::BI__builtin_crealf:
Meador Ingeb97878a2012-12-18 20:58:04 +00001802 case Builtin::BI__builtin_creall:
1803 case Builtin::BIcreal:
1804 case Builtin::BIcrealf:
1805 case Builtin::BIcreall: {
Fariborz Jahanian1ac11192012-08-14 20:09:28 +00001806 ComplexPairTy ComplexVal = EmitComplexExpr(E->getArg(0));
1807 return RValue::get(ComplexVal.first);
1808 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00001809
Aaron Ballman06525342018-04-10 21:58:13 +00001810 case Builtin::BI__builtin_dump_struct: {
James Y Knight76f78742019-02-05 19:17:50 +00001811 llvm::Type *LLVMIntTy = getTypes().ConvertType(getContext().IntTy);
1812 llvm::FunctionType *LLVMFuncType = llvm::FunctionType::get(
1813 LLVMIntTy, {llvm::Type::getInt8PtrTy(getLLVMContext())}, true);
1814
Aaron Ballman06525342018-04-10 21:58:13 +00001815 Value *Func = EmitScalarExpr(E->getArg(1)->IgnoreImpCasts());
1816 CharUnits Arg0Align = EmitPointerWithAlignment(E->getArg(0)).getAlignment();
1817
1818 const Expr *Arg0 = E->getArg(0)->IgnoreImpCasts();
1819 QualType Arg0Type = Arg0->getType()->getPointeeType();
1820
1821 Value *RecordPtr = EmitScalarExpr(Arg0);
James Y Knight76f78742019-02-05 19:17:50 +00001822 Value *Res = dumpRecord(*this, Arg0Type, RecordPtr, Arg0Align,
1823 {LLVMFuncType, Func}, 0);
Aaron Ballman06525342018-04-10 21:58:13 +00001824 return RValue::get(Res);
1825 }
1826
Fariborz Jahanian1ac11192012-08-14 20:09:28 +00001827 case Builtin::BI__builtin_cimag:
1828 case Builtin::BI__builtin_cimagf:
Meador Ingeb97878a2012-12-18 20:58:04 +00001829 case Builtin::BI__builtin_cimagl:
1830 case Builtin::BIcimag:
1831 case Builtin::BIcimagf:
1832 case Builtin::BIcimagl: {
Fariborz Jahanian1ac11192012-08-14 20:09:28 +00001833 ComplexPairTy ComplexVal = EmitComplexExpr(E->getArg(0));
1834 return RValue::get(ComplexVal.second);
1835 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00001836
Craig Topper0a4f6be2018-08-08 19:55:52 +00001837 case Builtin::BI__builtin_clrsb:
1838 case Builtin::BI__builtin_clrsbl:
1839 case Builtin::BI__builtin_clrsbll: {
1840 // clrsb(x) -> clz(x < 0 ? ~x : x) - 1
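// e.g. for a 32-bit int, clrsb(0) == clrsb(-1) == 31: every bit below the
// sign bit is a redundant copy of it.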
1841 Value *ArgValue = EmitScalarExpr(E->getArg(0));
1842
1843 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001844 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
Craig Topper0a4f6be2018-08-08 19:55:52 +00001845
1846 llvm::Type *ResultType = ConvertType(E->getType());
1847 Value *Zero = llvm::Constant::getNullValue(ArgType);
1848 Value *IsNeg = Builder.CreateICmpSLT(ArgValue, Zero, "isneg");
1849 Value *Inverse = Builder.CreateNot(ArgValue, "not");
1850 Value *Tmp = Builder.CreateSelect(IsNeg, Inverse, ArgValue);
1851 Value *Ctlz = Builder.CreateCall(F, {Tmp, Builder.getFalse()});
1852 Value *Result = Builder.CreateSub(Ctlz, llvm::ConstantInt::get(ArgType, 1));
1853 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1854 "cast");
1855 return RValue::get(Result);
1856 }
Benjamin Kramer14128162012-01-28 18:42:57 +00001857 case Builtin::BI__builtin_ctzs:
Anders Carlsson093f1a02008-02-06 07:19:27 +00001858 case Builtin::BI__builtin_ctz:
1859 case Builtin::BI__builtin_ctzl:
1860 case Builtin::BI__builtin_ctzll: {
Vedant Kumar10c31022017-07-29 00:19:51 +00001861 Value *ArgValue = EmitCheckedArgForBuiltin(E->getArg(0), BCK_CTZPassedZero);
Mike Stump11289f42009-09-09 15:08:12 +00001862
Chris Lattnera5f58b02011-07-09 17:41:47 +00001863 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001864 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
Anders Carlsson093f1a02008-02-06 07:19:27 +00001865
Chris Lattner2192fe52011-07-18 04:24:23 +00001866 llvm::Type *ResultType = ConvertType(E->getType());
John McCallc8e01702013-04-16 22:48:15 +00001867 Value *ZeroUndef = Builder.getInt1(getTarget().isCLZForZeroUndef());
David Blaikie43f9bb72015-05-18 22:14:03 +00001868 Value *Result = Builder.CreateCall(F, {ArgValue, ZeroUndef});
Anders Carlsson093f1a02008-02-06 07:19:27 +00001869 if (Result->getType() != ResultType)
Duncan Sands7876dad2009-11-16 13:11:21 +00001870 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1871 "cast");
Anders Carlsson093f1a02008-02-06 07:19:27 +00001872 return RValue::get(Result);
1873 }
Benjamin Kramer14128162012-01-28 18:42:57 +00001874 case Builtin::BI__builtin_clzs:
Eli Friedman5e2281e2008-05-27 15:32:46 +00001875 case Builtin::BI__builtin_clz:
1876 case Builtin::BI__builtin_clzl:
1877 case Builtin::BI__builtin_clzll: {
Vedant Kumar10c31022017-07-29 00:19:51 +00001878 Value *ArgValue = EmitCheckedArgForBuiltin(E->getArg(0), BCK_CLZPassedZero);
Mike Stump11289f42009-09-09 15:08:12 +00001879
Chris Lattnera5f58b02011-07-09 17:41:47 +00001880 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001881 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
Eli Friedman5e2281e2008-05-27 15:32:46 +00001882
Chris Lattner2192fe52011-07-18 04:24:23 +00001883 llvm::Type *ResultType = ConvertType(E->getType());
John McCallc8e01702013-04-16 22:48:15 +00001884 Value *ZeroUndef = Builder.getInt1(getTarget().isCLZForZeroUndef());
David Blaikie43f9bb72015-05-18 22:14:03 +00001885 Value *Result = Builder.CreateCall(F, {ArgValue, ZeroUndef});
Eli Friedman5e2281e2008-05-27 15:32:46 +00001886 if (Result->getType() != ResultType)
Duncan Sands7876dad2009-11-16 13:11:21 +00001887 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1888 "cast");
Eli Friedman5e2281e2008-05-27 15:32:46 +00001889 return RValue::get(Result);
1890 }
Daniel Dunbard93abc32008-07-21 17:19:41 +00001891 case Builtin::BI__builtin_ffs:
1892 case Builtin::BI__builtin_ffsl:
1893 case Builtin::BI__builtin_ffsll: {
1894 // ffs(x) -> x ? cttz(x) + 1 : 0
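// e.g. ffs(0b1000) == 4 (the lowest set bit is bit 3), and ffs(0) == 0.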
1895 Value *ArgValue = EmitScalarExpr(E->getArg(0));
Mike Stump11289f42009-09-09 15:08:12 +00001896
Chris Lattnera5f58b02011-07-09 17:41:47 +00001897 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001898 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
Mike Stump11289f42009-09-09 15:08:12 +00001899
Chris Lattner2192fe52011-07-18 04:24:23 +00001900 llvm::Type *ResultType = ConvertType(E->getType());
David Blaikie43f9bb72015-05-18 22:14:03 +00001901 Value *Tmp =
1902 Builder.CreateAdd(Builder.CreateCall(F, {ArgValue, Builder.getTrue()}),
1903 llvm::ConstantInt::get(ArgType, 1));
Owen Anderson0b75f232009-07-31 20:28:54 +00001904 Value *Zero = llvm::Constant::getNullValue(ArgType);
Daniel Dunbard93abc32008-07-21 17:19:41 +00001905 Value *IsZero = Builder.CreateICmpEQ(ArgValue, Zero, "iszero");
1906 Value *Result = Builder.CreateSelect(IsZero, Zero, Tmp, "ffs");
1907 if (Result->getType() != ResultType)
Duncan Sands7876dad2009-11-16 13:11:21 +00001908 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1909 "cast");
Daniel Dunbard93abc32008-07-21 17:19:41 +00001910 return RValue::get(Result);
1911 }
1912 case Builtin::BI__builtin_parity:
1913 case Builtin::BI__builtin_parityl:
1914 case Builtin::BI__builtin_parityll: {
1915 // parity(x) -> ctpop(x) & 1
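// e.g. parity(0b1011) == 1 (three bits set) and parity(0b1001) == 0.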
1916 Value *ArgValue = EmitScalarExpr(E->getArg(0));
Mike Stump11289f42009-09-09 15:08:12 +00001917
Chris Lattnera5f58b02011-07-09 17:41:47 +00001918 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001919 Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
Mike Stump11289f42009-09-09 15:08:12 +00001920
Chris Lattner2192fe52011-07-18 04:24:23 +00001921 llvm::Type *ResultType = ConvertType(E->getType());
Benjamin Kramer76399eb2011-09-27 21:06:10 +00001922 Value *Tmp = Builder.CreateCall(F, ArgValue);
1923 Value *Result = Builder.CreateAnd(Tmp, llvm::ConstantInt::get(ArgType, 1));
Daniel Dunbard93abc32008-07-21 17:19:41 +00001924 if (Result->getType() != ResultType)
Duncan Sands7876dad2009-11-16 13:11:21 +00001925 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1926 "cast");
Daniel Dunbard93abc32008-07-21 17:19:41 +00001927 return RValue::get(Result);
1928 }
Craig Topper1f2b1812018-12-14 00:21:02 +00001929 case Builtin::BI__lzcnt16:
1930 case Builtin::BI__lzcnt:
1931 case Builtin::BI__lzcnt64: {
1932 Value *ArgValue = EmitScalarExpr(E->getArg(0));
1933
1934 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001935 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
Craig Topper1f2b1812018-12-14 00:21:02 +00001936
1937 llvm::Type *ResultType = ConvertType(E->getType());
1938 Value *Result = Builder.CreateCall(F, {ArgValue, Builder.getFalse()});
1939 if (Result->getType() != ResultType)
1940 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1941 "cast");
1942 return RValue::get(Result);
1943 }
Albert Gutowski727ab8a2016-09-14 21:19:43 +00001944 case Builtin::BI__popcnt16:
1945 case Builtin::BI__popcnt:
1946 case Builtin::BI__popcnt64:
Daniel Dunbard93abc32008-07-21 17:19:41 +00001947 case Builtin::BI__builtin_popcount:
1948 case Builtin::BI__builtin_popcountl:
1949 case Builtin::BI__builtin_popcountll: {
1950 Value *ArgValue = EmitScalarExpr(E->getArg(0));
Mike Stump11289f42009-09-09 15:08:12 +00001951
Chris Lattnera5f58b02011-07-09 17:41:47 +00001952 llvm::Type *ArgType = ArgValue->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00001953 Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
Mike Stump11289f42009-09-09 15:08:12 +00001954
Chris Lattner2192fe52011-07-18 04:24:23 +00001955 llvm::Type *ResultType = ConvertType(E->getType());
Benjamin Kramer76399eb2011-09-27 21:06:10 +00001956 Value *Result = Builder.CreateCall(F, ArgValue);
Daniel Dunbard93abc32008-07-21 17:19:41 +00001957 if (Result->getType() != ResultType)
Duncan Sands7876dad2009-11-16 13:11:21 +00001958 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
1959 "cast");
Daniel Dunbard93abc32008-07-21 17:19:41 +00001960 return RValue::get(Result);
1961 }
Sanjay Patela24296b2015-09-02 20:01:30 +00001962 case Builtin::BI__builtin_unpredictable: {
1963 // Always return the argument of __builtin_unpredictable. LLVM does not
1964 // handle this builtin. Metadata for this builtin should be added directly
1965 // to instructions such as branches or switches that use it.
1966 return RValue::get(EmitScalarExpr(E->getArg(0)));
1967 }
Fariborz Jahanian0ebca282010-07-26 23:11:03 +00001968 case Builtin::BI__builtin_expect: {
Fariborz Jahanian24ac1592011-04-25 23:10:07 +00001969 Value *ArgValue = EmitScalarExpr(E->getArg(0));
Chris Lattnera5f58b02011-07-09 17:41:47 +00001970 llvm::Type *ArgType = ArgValue->getType();
Jakub Staszakd2cf2cb2011-07-08 22:45:14 +00001971
Jakub Staszakd2cf2cb2011-07-08 22:45:14 +00001972 Value *ExpectedValue = EmitScalarExpr(E->getArg(1));
Pete Cooperf051cbf2015-01-26 20:51:58 +00001973 // Don't generate llvm.expect on -O0 as the backend won't use it for
1974 // anything.
1975 // Note, we still IRGen ExpectedValue because it could have side-effects.
1976 if (CGM.getCodeGenOpts().OptimizationLevel == 0)
1977 return RValue::get(ArgValue);
Jakub Staszakd2cf2cb2011-07-08 22:45:14 +00001978
James Y Knight8799cae2019-02-03 21:53:49 +00001979 Function *FnExpect = CGM.getIntrinsic(Intrinsic::expect, ArgType);
David Blaikie43f9bb72015-05-18 22:14:03 +00001980 Value *Result =
1981 Builder.CreateCall(FnExpect, {ArgValue, ExpectedValue}, "expval");
Jakub Staszakd2cf2cb2011-07-08 22:45:14 +00001982 return RValue::get(Result);
Fariborz Jahanian0ebca282010-07-26 23:11:03 +00001983 }
Hal Finkelbcc06082014-09-07 22:58:14 +00001984 case Builtin::BI__builtin_assume_aligned: {
Roman Lebedevbd1c0872019-01-15 09:44:25 +00001985 const Expr *Ptr = E->getArg(0);
1986 Value *PtrValue = EmitScalarExpr(Ptr);
Hal Finkelbcc06082014-09-07 22:58:14 +00001987 Value *OffsetValue =
1988 (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) : nullptr;
1989
1990 Value *AlignmentValue = EmitScalarExpr(E->getArg(1));
1991 ConstantInt *AlignmentCI = cast<ConstantInt>(AlignmentValue);
Roman Lebedevbd1c0872019-01-15 09:44:25 +00001992 unsigned Alignment = (unsigned)AlignmentCI->getZExtValue();
Hal Finkelbcc06082014-09-07 22:58:14 +00001993
Kristina Brooks716cbfb2019-02-24 17:57:33 +00001994 EmitAlignmentAssumption(PtrValue, Ptr,
Kristina Brooks103799c2019-02-24 18:06:10 +00001995 /*The expr loc is sufficient.*/ SourceLocation(),
Roman Lebedevbd1c0872019-01-15 09:44:25 +00001996 Alignment, OffsetValue);
Hal Finkelbcc06082014-09-07 22:58:14 +00001997 return RValue::get(PtrValue);
1998 }
1999 case Builtin::BI__assume:
2000 case Builtin::BI__builtin_assume: {
2001 if (E->getArg(0)->HasSideEffects(getContext()))
2002 return RValue::get(nullptr);
2003
2004 Value *ArgValue = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +00002005 Function *FnAssume = CGM.getIntrinsic(Intrinsic::assume);
Hal Finkelbcc06082014-09-07 22:58:14 +00002006 return RValue::get(Builder.CreateCall(FnAssume, ArgValue));
2007 }
Benjamin Kramera801f4a2012-10-06 14:42:22 +00002008 case Builtin::BI__builtin_bswap16:
Anders Carlssonef93b9d2007-12-02 21:58:10 +00002009 case Builtin::BI__builtin_bswap32:
2010 case Builtin::BI__builtin_bswap64: {
Matt Arsenault105e8922016-02-03 17:49:38 +00002011 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::bswap));
2012 }
Matt Arsenault08087c52016-03-23 22:14:43 +00002013 case Builtin::BI__builtin_bitreverse8:
Matt Arsenault105e8922016-02-03 17:49:38 +00002014 case Builtin::BI__builtin_bitreverse16:
2015 case Builtin::BI__builtin_bitreverse32:
2016 case Builtin::BI__builtin_bitreverse64: {
2017 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::bitreverse));
Mike Stump11289f42009-09-09 15:08:12 +00002018 }
Sanjay Patelad823902018-08-19 16:50:30 +00002019 case Builtin::BI__builtin_rotateleft8:
2020 case Builtin::BI__builtin_rotateleft16:
2021 case Builtin::BI__builtin_rotateleft32:
2022 case Builtin::BI__builtin_rotateleft64:
Sanjay Patelc6fa5bc2018-11-25 17:53:16 +00002023 case Builtin::BI_rotl8: // Microsoft variants of rotate left
2024 case Builtin::BI_rotl16:
2025 case Builtin::BI_rotl:
2026 case Builtin::BI_lrotl:
2027 case Builtin::BI_rotl64:
Sanjay Patelad823902018-08-19 16:50:30 +00002028 return emitRotate(E, false);
2029
2030 case Builtin::BI__builtin_rotateright8:
2031 case Builtin::BI__builtin_rotateright16:
2032 case Builtin::BI__builtin_rotateright32:
2033 case Builtin::BI__builtin_rotateright64:
Sanjay Patelc6fa5bc2018-11-25 17:53:16 +00002034 case Builtin::BI_rotr8: // Microsoft variants of rotate right
2035 case Builtin::BI_rotr16:
2036 case Builtin::BI_rotr:
2037 case Builtin::BI_lrotr:
2038 case Builtin::BI_rotr64:
Sanjay Patelad823902018-08-19 16:50:30 +00002039 return emitRotate(E, true);
2040
Fangrui Song407659a2018-11-30 23:41:18 +00002041 case Builtin::BI__builtin_constant_p: {
2042 llvm::Type *ResultType = ConvertType(E->getType());
2043 if (CGM.getCodeGenOpts().OptimizationLevel == 0)
2044 // At -O0, we don't perform inlining, so we don't need to delay the
2045 // processing.
2046 return RValue::get(ConstantInt::get(ResultType, 0));
2047
2048 const Expr *Arg = E->getArg(0);
2049 QualType ArgType = Arg->getType();
Richard Smith31cfb312019-04-27 02:58:17 +00002050 // FIXME: The allowance for Obj-C pointers and block pointers is historical
2051 // and likely a mistake.
2052 if (!ArgType->isIntegralOrEnumerationType() && !ArgType->isFloatingType() &&
2053 !ArgType->isObjCObjectPointerType() && !ArgType->isBlockPointerType())
2054 // Per the GCC documentation, only numeric constants are recognized after
2055 // inlining.
2056 return RValue::get(ConstantInt::get(ResultType, 0));
2057
2058 if (Arg->HasSideEffects(getContext()))
2059 // The argument is unevaluated, so be conservative if it might have
2060 // side-effects.
Fangrui Song407659a2018-11-30 23:41:18 +00002061 return RValue::get(ConstantInt::get(ResultType, 0));
2062
2063 Value *ArgValue = EmitScalarExpr(Arg);
Volodymyr Sapsai15705712019-02-08 23:02:13 +00002064 if (ArgType->isObjCObjectPointerType()) {
2065 // Convert Objective-C objects to id because we cannot distinguish between
2066 // LLVM types for Obj-C classes as they are opaque.
2067 ArgType = CGM.getContext().getObjCIdType();
2068 ArgValue = Builder.CreateBitCast(ArgValue, ConvertType(ArgType));
2069 }
James Y Knight8799cae2019-02-03 21:53:49 +00002070 Function *F =
2071 CGM.getIntrinsic(Intrinsic::is_constant, ConvertType(ArgType));
Fangrui Song407659a2018-11-30 23:41:18 +00002072 Value *Result = Builder.CreateCall(F, ArgValue);
2073 if (Result->getType() != ResultType)
2074 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/false);
2075 return RValue::get(Result);
2076 }
Erik Pilkington9c3b5882019-01-30 20:34:53 +00002077 case Builtin::BI__builtin_dynamic_object_size:
Daniel Dunbarb0d34c82008-09-03 21:13:56 +00002078 case Builtin::BI__builtin_object_size: {
George Burgess IV3e3bb95b2015-12-02 21:58:08 +00002079 unsigned Type =
2080 E->getArg(1)->EvaluateKnownConstInt(getContext()).getZExtValue();
2081 auto *ResType = cast<llvm::IntegerType>(ConvertType(E->getType()));
Richard Smith01ade172012-05-23 04:13:20 +00002082
George Burgess IV3e3bb95b2015-12-02 21:58:08 +00002083 // We pass this builtin onto the optimizer so that it can figure out the
2084 // object size in more complex cases.
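// For example, given "char buf[10];", __builtin_object_size(buf + 2, 0) folds
// to 8; when the size is unknown the result folds to -1 for types 0 and 1 and
// to 0 for types 2 and 3, per the GCC documentation.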
Erik Pilkington9c3b5882019-01-30 20:34:53 +00002085 bool IsDynamic = BuiltinID == Builtin::BI__builtin_dynamic_object_size;
George Burgess IV0d6592a2017-02-23 05:59:56 +00002086 return RValue::get(emitBuiltinObjectSize(E->getArg(0), Type, ResType,
Erik Pilkington9c3b5882019-01-30 20:34:53 +00002087 /*EmittedE=*/nullptr, IsDynamic));
Daniel Dunbarb0d34c82008-09-03 21:13:56 +00002088 }
Daniel Dunbarb7257262008-07-21 22:59:13 +00002089 case Builtin::BI__builtin_prefetch: {
2090 Value *Locality, *RW, *Address = EmitScalarExpr(E->getArg(0));
2091 // FIXME: Technically these constants should be of type 'int', yes?
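// e.g. __builtin_prefetch(p) becomes @llvm.prefetch(p, /*rw=*/0,
// /*locality=*/3, /*cache type=*/1), matching the defaults filled in below.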
Mike Stump11289f42009-09-09 15:08:12 +00002092 RW = (E->getNumArgs() > 1) ? EmitScalarExpr(E->getArg(1)) :
Chris Lattner5e016ae2010-06-27 07:15:29 +00002093 llvm::ConstantInt::get(Int32Ty, 0);
Mike Stump11289f42009-09-09 15:08:12 +00002094 Locality = (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) :
Chris Lattner5e016ae2010-06-27 07:15:29 +00002095 llvm::ConstantInt::get(Int32Ty, 3);
Bruno Cardoso Lopes3b0297a2011-06-14 05:00:30 +00002096 Value *Data = llvm::ConstantInt::get(Int32Ty, 1);
James Y Knight8799cae2019-02-03 21:53:49 +00002097 Function *F = CGM.getIntrinsic(Intrinsic::prefetch);
David Blaikie43f9bb72015-05-18 22:14:03 +00002098 return RValue::get(Builder.CreateCall(F, {Address, RW, Locality, Data}));
Anders Carlssonef93b9d2007-12-02 21:58:10 +00002099 }
Hal Finkel3fadbb52012-08-05 22:03:08 +00002100 case Builtin::BI__builtin_readcyclecounter: {
James Y Knight8799cae2019-02-03 21:53:49 +00002101 Function *F = CGM.getIntrinsic(Intrinsic::readcyclecounter);
David Blaikie4ba525b2015-07-14 17:27:39 +00002102 return RValue::get(Builder.CreateCall(F));
Hal Finkel3fadbb52012-08-05 22:03:08 +00002103 }
Renato Golinc491a8d2014-03-26 15:36:05 +00002104 case Builtin::BI__builtin___clear_cache: {
2105 Value *Begin = EmitScalarExpr(E->getArg(0));
2106 Value *End = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +00002107 Function *F = CGM.getIntrinsic(Intrinsic::clear_cache);
David Blaikie43f9bb72015-05-18 22:14:03 +00002108 return RValue::get(Builder.CreateCall(F, {Begin, End}));
Renato Golinc491a8d2014-03-26 15:36:05 +00002109 }
Akira Hatanaka85365cd2015-07-02 22:15:41 +00002110 case Builtin::BI__builtin_trap:
2111 return RValue::get(EmitTrapCall(Intrinsic::trap));
2112 case Builtin::BI__debugbreak:
2113 return RValue::get(EmitTrapCall(Intrinsic::debugtrap));
Chris Lattnerbf206382009-09-21 03:09:59 +00002114 case Builtin::BI__builtin_unreachable: {
Vedant Kumar09b5bfd2017-12-21 00:10:25 +00002115 EmitUnreachable(E->getExprLoc());
John McCall20f6ab82011-01-12 03:41:02 +00002116
2117 // We do need to preserve an insertion point.
John McCallad7c5c12011-02-08 08:22:06 +00002118 EmitBlock(createBasicBlock("unreachable.cont"));
John McCall20f6ab82011-01-12 03:41:02 +00002119
Craig Topper8a13c412014-05-21 05:09:00 +00002120 return RValue::get(nullptr);
Chris Lattnerbf206382009-09-21 03:09:59 +00002121 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00002122
Daniel Dunbarc2f67962008-07-21 18:44:41 +00002123 case Builtin::BI__builtin_powi:
2124 case Builtin::BI__builtin_powif:
Reid Kleckner1fcccdd2015-02-05 00:24:57 +00002125 case Builtin::BI__builtin_powil: {
Daniel Dunbarc2f67962008-07-21 18:44:41 +00002126 Value *Base = EmitScalarExpr(E->getArg(0));
2127 Value *Exponent = EmitScalarExpr(E->getArg(1));
Chris Lattnera5f58b02011-07-09 17:41:47 +00002128 llvm::Type *ArgType = Base->getType();
James Y Knight8799cae2019-02-03 21:53:49 +00002129 Function *F = CGM.getIntrinsic(Intrinsic::powi, ArgType);
David Blaikie43f9bb72015-05-18 22:14:03 +00002130 return RValue::get(Builder.CreateCall(F, {Base, Exponent}));
Daniel Dunbarc2f67962008-07-21 18:44:41 +00002131 }
2132
Chris Lattner6c9ffe92007-12-20 00:44:32 +00002133 case Builtin::BI__builtin_isgreater:
2134 case Builtin::BI__builtin_isgreaterequal:
2135 case Builtin::BI__builtin_isless:
2136 case Builtin::BI__builtin_islessequal:
2137 case Builtin::BI__builtin_islessgreater:
2138 case Builtin::BI__builtin_isunordered: {
2139 // Ordered comparisons: we know the arguments to these are matching scalar
2140 // floating point values.
Mike Stump11289f42009-09-09 15:08:12 +00002141 Value *LHS = EmitScalarExpr(E->getArg(0));
Chris Lattner6c9ffe92007-12-20 00:44:32 +00002142 Value *RHS = EmitScalarExpr(E->getArg(1));
Mike Stump11289f42009-09-09 15:08:12 +00002143
Chris Lattner6c9ffe92007-12-20 00:44:32 +00002144 switch (BuiltinID) {
David Blaikie83d382b2011-09-23 05:06:16 +00002145 default: llvm_unreachable("Unknown ordered comparison");
Chris Lattner6c9ffe92007-12-20 00:44:32 +00002146 case Builtin::BI__builtin_isgreater:
2147 LHS = Builder.CreateFCmpOGT(LHS, RHS, "cmp");
2148 break;
2149 case Builtin::BI__builtin_isgreaterequal:
2150 LHS = Builder.CreateFCmpOGE(LHS, RHS, "cmp");
2151 break;
2152 case Builtin::BI__builtin_isless:
2153 LHS = Builder.CreateFCmpOLT(LHS, RHS, "cmp");
2154 break;
2155 case Builtin::BI__builtin_islessequal:
2156 LHS = Builder.CreateFCmpOLE(LHS, RHS, "cmp");
2157 break;
2158 case Builtin::BI__builtin_islessgreater:
2159 LHS = Builder.CreateFCmpONE(LHS, RHS, "cmp");
2160 break;
Mike Stump11289f42009-09-09 15:08:12 +00002161 case Builtin::BI__builtin_isunordered:
Chris Lattner6c9ffe92007-12-20 00:44:32 +00002162 LHS = Builder.CreateFCmpUNO(LHS, RHS, "cmp");
2163 break;
2164 }
2165 // ZExt bool to int type.
Benjamin Kramer76399eb2011-09-27 21:06:10 +00002166 return RValue::get(Builder.CreateZExt(LHS, ConvertType(E->getType())));
Chris Lattner6c9ffe92007-12-20 00:44:32 +00002167 }
Eli Friedman1c277d02009-09-01 04:19:44 +00002168 case Builtin::BI__builtin_isnan: {
2169 Value *V = EmitScalarExpr(E->getArg(0));
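// isnan(x) --> (x unord x); only a NaN compares unordered with itself.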
2170 V = Builder.CreateFCmpUNO(V, V, "cmp");
Benjamin Kramer76399eb2011-09-27 21:06:10 +00002171 return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
Eli Friedman1c277d02009-09-01 04:19:44 +00002172 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00002173
Dehao Chen5d4f0be2016-09-14 17:34:14 +00002174 case Builtin::BIfinite:
2175 case Builtin::BI__finite:
2176 case Builtin::BIfinitef:
2177 case Builtin::BI__finitef:
2178 case Builtin::BIfinitel:
2179 case Builtin::BI__finitel:
Sanjay Patelae7a9df2016-04-07 14:29:05 +00002180 case Builtin::BI__builtin_isinf:
2181 case Builtin::BI__builtin_isfinite: {
2182 // isinf(x) --> fabs(x) == infinity
2183 // isfinite(x) --> fabs(x) != infinity
2184 // x != NaN via the ordered compare in either case.
Chris Lattner43660c52010-05-06 05:35:16 +00002185 Value *V = EmitScalarExpr(E->getArg(0));
Sanjay Patelae7a9df2016-04-07 14:29:05 +00002186 Value *Fabs = EmitFAbs(*this, V);
2187 Constant *Infinity = ConstantFP::getInfinity(V->getType());
2188 CmpInst::Predicate Pred = (BuiltinID == Builtin::BI__builtin_isinf)
2189 ? CmpInst::FCMP_OEQ
2190 : CmpInst::FCMP_ONE;
2191 Value *FCmp = Builder.CreateFCmp(Pred, Fabs, Infinity, "cmpinf");
2192 return RValue::get(Builder.CreateZExt(FCmp, ConvertType(E->getType())));
Chris Lattner43660c52010-05-06 05:35:16 +00002193 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00002194
Chandler Carruthc66deaf2015-03-19 22:39:51 +00002195 case Builtin::BI__builtin_isinf_sign: {
2196 // isinf_sign(x) -> fabs(x) == infinity ? (signbit(x) ? -1 : 1) : 0
2197 Value *Arg = EmitScalarExpr(E->getArg(0));
2198 Value *AbsArg = EmitFAbs(*this, Arg);
2199 Value *IsInf = Builder.CreateFCmpOEQ(
2200 AbsArg, ConstantFP::getInfinity(Arg->getType()), "isinf");
2201 Value *IsNeg = EmitSignBit(*this, Arg);
2202
2203 llvm::Type *IntTy = ConvertType(E->getType());
2204 Value *Zero = Constant::getNullValue(IntTy);
2205 Value *One = ConstantInt::get(IntTy, 1);
2206 Value *NegativeOne = ConstantInt::get(IntTy, -1);
2207 Value *SignResult = Builder.CreateSelect(IsNeg, NegativeOne, One);
2208 Value *Result = Builder.CreateSelect(IsInf, SignResult, Zero);
2209 return RValue::get(Result);
2210 }
Benjamin Kramerfdb61d72010-05-19 11:24:26 +00002211
2212 case Builtin::BI__builtin_isnormal: {
2213 // isnormal(x) --> x == x && fabsf(x) < infinity && fabsf(x) >= float_min
2214 Value *V = EmitScalarExpr(E->getArg(0));
2215 Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
2216
Reid Kleckner4cad00a2014-11-03 23:51:40 +00002217 Value *Abs = EmitFAbs(*this, V);
Benjamin Kramerfdb61d72010-05-19 11:24:26 +00002218 Value *IsLessThanInf =
2219 Builder.CreateFCmpULT(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
2220 APFloat Smallest = APFloat::getSmallestNormalized(
2221 getContext().getFloatTypeSemantics(E->getArg(0)->getType()));
2222 Value *IsNormal =
2223 Builder.CreateFCmpUGE(Abs, ConstantFP::get(V->getContext(), Smallest),
2224 "isnormal");
2225 V = Builder.CreateAnd(Eq, IsLessThanInf, "and");
2226 V = Builder.CreateAnd(V, IsNormal, "and");
2227 return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
2228 }
2229
Anton Korobeynikov81cff312019-01-17 15:21:55 +00002230 case Builtin::BI__builtin_flt_rounds: {
James Y Knight8799cae2019-02-03 21:53:49 +00002231 Function *F = CGM.getIntrinsic(Intrinsic::flt_rounds);
Anton Korobeynikov81cff312019-01-17 15:21:55 +00002232
2233 llvm::Type *ResultType = ConvertType(E->getType());
2234 Value *Result = Builder.CreateCall(F);
2235 if (Result->getType() != ResultType)
2236 Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
2237 "cast");
2238 return RValue::get(Result);
2239 }
2240
Benjamin Kramer7039fcb2010-06-14 10:30:41 +00002241 case Builtin::BI__builtin_fpclassify: {
2242 Value *V = EmitScalarExpr(E->getArg(5));
Chris Lattner2192fe52011-07-18 04:24:23 +00002243 llvm::Type *Ty = ConvertType(E->getArg(5)->getType());
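    // Argument order, as used below: arg 5 is the value being classified,
    // while args 0-4 supply the values returned for NaN, infinity, normal,
    // subnormal and zero respectively, e.g.
    //   __builtin_fpclassify(FP_NAN, FP_INFINITE, FP_NORMAL,
    //                        FP_SUBNORMAL, FP_ZERO, x);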
Benjamin Kramer7039fcb2010-06-14 10:30:41 +00002244
2245 // Create the result PHI in the merge block; each classification branch below feeds it.
2246 BasicBlock *Begin = Builder.GetInsertBlock();
2247 BasicBlock *End = createBasicBlock("fpclassify_end", this->CurFn);
2248 Builder.SetInsertPoint(End);
2249 PHINode *Result =
Jay Foad20c0f022011-03-30 11:28:58 +00002250 Builder.CreatePHI(ConvertType(E->getArg(0)->getType()), 4,
Benjamin Kramer7039fcb2010-06-14 10:30:41 +00002251 "fpclassify_result");
2252
2253 // if (V==0) return FP_ZERO
2254 Builder.SetInsertPoint(Begin);
2255 Value *IsZero = Builder.CreateFCmpOEQ(V, Constant::getNullValue(Ty),
2256 "iszero");
2257 Value *ZeroLiteral = EmitScalarExpr(E->getArg(4));
2258 BasicBlock *NotZero = createBasicBlock("fpclassify_not_zero", this->CurFn);
2259 Builder.CreateCondBr(IsZero, End, NotZero);
2260 Result->addIncoming(ZeroLiteral, Begin);
2261
2262 // if (V != V) return FP_NAN
2263 Builder.SetInsertPoint(NotZero);
2264 Value *IsNan = Builder.CreateFCmpUNO(V, V, "cmp");
2265 Value *NanLiteral = EmitScalarExpr(E->getArg(0));
2266 BasicBlock *NotNan = createBasicBlock("fpclassify_not_nan", this->CurFn);
2267 Builder.CreateCondBr(IsNan, End, NotNan);
2268 Result->addIncoming(NanLiteral, NotZero);
2269
2270 // if (fabs(V) == infinity) return FP_INFINITY
2271 Builder.SetInsertPoint(NotNan);
Reid Kleckner4cad00a2014-11-03 23:51:40 +00002272 Value *VAbs = EmitFAbs(*this, V);
Benjamin Kramer7039fcb2010-06-14 10:30:41 +00002273 Value *IsInf =
2274 Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(V->getType()),
2275 "isinf");
2276 Value *InfLiteral = EmitScalarExpr(E->getArg(1));
2277 BasicBlock *NotInf = createBasicBlock("fpclassify_not_inf", this->CurFn);
2278 Builder.CreateCondBr(IsInf, End, NotInf);
2279 Result->addIncoming(InfLiteral, NotNan);
2280
2281 // if (fabs(V) >= MIN_NORMAL) return FP_NORMAL else FP_SUBNORMAL
2282 Builder.SetInsertPoint(NotInf);
2283 APFloat Smallest = APFloat::getSmallestNormalized(
2284 getContext().getFloatTypeSemantics(E->getArg(5)->getType()));
2285 Value *IsNormal =
2286 Builder.CreateFCmpUGE(VAbs, ConstantFP::get(V->getContext(), Smallest),
2287 "isnormal");
2288 Value *NormalResult =
2289 Builder.CreateSelect(IsNormal, EmitScalarExpr(E->getArg(2)),
2290 EmitScalarExpr(E->getArg(3)));
2291 Builder.CreateBr(End);
2292 Result->addIncoming(NormalResult, NotInf);
2293
2294 // return Result
2295 Builder.SetInsertPoint(End);
2296 return RValue::get(Result);
2297 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00002298
Eli Friedmanf6bd1502009-06-02 07:10:30 +00002299 case Builtin::BIalloca:
Reid Kleckner59e4a6f2013-11-13 22:58:53 +00002300 case Builtin::BI_alloca:
Chris Lattner22b9ff42008-06-16 17:15:14 +00002301 case Builtin::BI__builtin_alloca: {
Chris Lattner22b9ff42008-06-16 17:15:14 +00002302 Value *Size = EmitScalarExpr(E->getArg(0));
David Majnemer1878da42016-10-27 17:18:24 +00002303 const TargetInfo &TI = getContext().getTargetInfo();
2304 // The alignment of the alloca should correspond to __BIGGEST_ALIGNMENT__.
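    // For example (illustrative; the value is target-dependent), on a typical
    // x86-64 target __BIGGEST_ALIGNMENT__ is 16, so
    //   void *p = __builtin_alloca(n);
    // becomes roughly
    //   %p = alloca i8, i64 %n, align 16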
2305 unsigned SuitableAlignmentInBytes =
David Majnemerbb103d92016-10-31 16:48:30 +00002306 CGM.getContext()
2307 .toCharUnitsFromBits(TI.getSuitableAlign())
2308 .getQuantity();
David Majnemer1878da42016-10-27 17:18:24 +00002309 AllocaInst *AI = Builder.CreateAlloca(Builder.getInt8Ty(), Size);
2310 AI->setAlignment(SuitableAlignmentInBytes);
JF Bastienef202c32019-04-12 00:11:27 +00002311 initializeAlloca(*this, AI, Size, SuitableAlignmentInBytes);
David Majnemer1878da42016-10-27 17:18:24 +00002312 return RValue::get(AI);
Daniel Dunbar327acd72008-07-22 00:26:45 +00002313 }
David Majnemer51169932016-10-31 05:37:48 +00002314
2315 case Builtin::BI__builtin_alloca_with_align: {
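    // The second argument is the requested alignment in bits and must be an
    // integer constant expression; it is converted to bytes before being
    // applied to the alloca below.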
2316 Value *Size = EmitScalarExpr(E->getArg(0));
David Majnemerbb103d92016-10-31 16:48:30 +00002317 Value *AlignmentInBitsValue = EmitScalarExpr(E->getArg(1));
2318 auto *AlignmentInBitsCI = cast<ConstantInt>(AlignmentInBitsValue);
2319 unsigned AlignmentInBits = AlignmentInBitsCI->getZExtValue();
2320 unsigned AlignmentInBytes =
2321 CGM.getContext().toCharUnitsFromBits(AlignmentInBits).getQuantity();
David Majnemer51169932016-10-31 05:37:48 +00002322 AllocaInst *AI = Builder.CreateAlloca(Builder.getInt8Ty(), Size);
2323 AI->setAlignment(AlignmentInBytes);
JF Bastienef202c32019-04-12 00:11:27 +00002324 initializeAlloca(*this, AI, Size, AlignmentInBytes);
David Majnemer51169932016-10-31 05:37:48 +00002325 return RValue::get(AI);
2326 }
2327
Eli Friedmand6ef69a2010-01-23 19:00:10 +00002328 case Builtin::BIbzero:
Daniel Dunbar327acd72008-07-22 00:26:45 +00002329 case Builtin::BI__builtin_bzero: {
John McCall7f416cc2015-09-08 08:05:57 +00002330 Address Dest = EmitPointerWithAlignment(E->getArg(0));
Mon P Wangcc2ab0c2010-04-04 03:10:52 +00002331 Value *SizeVal = EmitScalarExpr(E->getArg(1));
John McCall7f416cc2015-09-08 08:05:57 +00002332 EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002333 E->getArg(0)->getExprLoc(), FD, 0);
John McCall7f416cc2015-09-08 08:05:57 +00002334 Builder.CreateMemSet(Dest, Builder.getInt8(0), SizeVal, false);
John McCall26d55e02017-11-09 09:32:32 +00002335 return RValue::get(nullptr);
Chris Lattner22b9ff42008-06-16 17:15:14 +00002336 }
Eli Friedman7f4933f2009-12-17 00:14:28 +00002337 case Builtin::BImemcpy:
Eli Friedmana3a40682008-05-19 23:27:48 +00002338 case Builtin::BI__builtin_memcpy: {
John McCall7f416cc2015-09-08 08:05:57 +00002339 Address Dest = EmitPointerWithAlignment(E->getArg(0));
2340 Address Src = EmitPointerWithAlignment(E->getArg(1));
Mon P Wangcc2ab0c2010-04-04 03:10:52 +00002341 Value *SizeVal = EmitScalarExpr(E->getArg(2));
John McCall7f416cc2015-09-08 08:05:57 +00002342 EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002343 E->getArg(0)->getExprLoc(), FD, 0);
John McCall7f416cc2015-09-08 08:05:57 +00002344 EmitNonNullArgCheck(RValue::get(Src.getPointer()), E->getArg(1)->getType(),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002345 E->getArg(1)->getExprLoc(), FD, 1);
John McCall7f416cc2015-09-08 08:05:57 +00002346 Builder.CreateMemCpy(Dest, Src, SizeVal, false);
2347 return RValue::get(Dest.getPointer());
Daniel Dunbar327acd72008-07-22 00:26:45 +00002348 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00002349
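  // __builtin_char_memchr behaves like __builtin_memchr but returns char *
  // (it exists to support constexpr memchr-style searches in C++); the
  // lowering is identical, so rewrite the ID and break out to the generic
  // library-call handling below.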
Richard Smith5e29dd32017-01-20 00:45:35 +00002350 case Builtin::BI__builtin_char_memchr:
2351 BuiltinID = Builtin::BI__builtin_memchr;
2352 break;
2353
Chris Lattner30107ed2011-04-17 00:40:24 +00002354 case Builtin::BI__builtin___memcpy_chk: {
Sylvestre Ledru33b5baf2012-09-27 10:16:10 +00002355 // fold __builtin_memcpy_chk(x, y, cst1, cst2) to memcpy iff cst1<=cst2.
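    // For instance (illustrative), __builtin___memcpy_chk(d, s, 16, 32) is
    // emitted as a plain 16-byte memcpy because the copy is known to fit; if
    // either size is not a constant, or the copy could exceed the destination
    // size, we break out to the generic library-call handling instead.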
Fangrui Song407659a2018-11-30 23:41:18 +00002356 Expr::EvalResult SizeResult, DstSizeResult;
2357 if (!E->getArg(2)->EvaluateAsInt(SizeResult, CGM.getContext()) ||
2358 !E->getArg(3)->EvaluateAsInt(DstSizeResult, CGM.getContext()))
Chris Lattner30107ed2011-04-17 00:40:24 +00002359 break;
Fangrui Song407659a2018-11-30 23:41:18 +00002360 llvm::APSInt Size = SizeResult.Val.getInt();
2361 llvm::APSInt DstSize = DstSizeResult.Val.getInt();
Chris Lattner30107ed2011-04-17 00:40:24 +00002362 if (Size.ugt(DstSize))
2363 break;
John McCall7f416cc2015-09-08 08:05:57 +00002364 Address Dest = EmitPointerWithAlignment(E->getArg(0));
2365 Address Src = EmitPointerWithAlignment(E->getArg(1));
Chris Lattner30107ed2011-04-17 00:40:24 +00002366 Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
John McCall7f416cc2015-09-08 08:05:57 +00002367 Builder.CreateMemCpy(Dest, Src, SizeVal, false);
2368 return RValue::get(Dest.getPointer());
Chris Lattner30107ed2011-04-17 00:40:24 +00002369 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00002370
Fariborz Jahanian4a303072010-06-16 16:22:04 +00002371 case Builtin::BI__builtin_objc_memmove_collectable: {
John McCall7f416cc2015-09-08 08:05:57 +00002372 Address DestAddr = EmitPointerWithAlignment(E->getArg(0));
2373 Address SrcAddr = EmitPointerWithAlignment(E->getArg(1));
Fariborz Jahanian021510e2010-06-15 22:44:06 +00002374 Value *SizeVal = EmitScalarExpr(E->getArg(2));
Jim Grosbachd3608f42012-09-21 00:18:27 +00002375 CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this,
John McCall7f416cc2015-09-08 08:05:57 +00002376 DestAddr, SrcAddr, SizeVal);
2377 return RValue::get(DestAddr.getPointer());
Fariborz Jahanian021510e2010-06-15 22:44:06 +00002378 }
Chris Lattner30107ed2011-04-17 00:40:24 +00002379
2380 case Builtin::BI__builtin___memmove_chk: {
Sylvestre Ledru33b5baf2012-09-27 10:16:10 +00002381 // fold __builtin_memmove_chk(x, y, cst1, cst2) to memmove iff cst1<=cst2.
Fangrui Song407659a2018-11-30 23:41:18 +00002382 Expr::EvalResult SizeResult, DstSizeResult;
2383 if (!E->getArg(2)->EvaluateAsInt(SizeResult, CGM.getContext()) ||
2384 !E->getArg(3)->EvaluateAsInt(DstSizeResult, CGM.getContext()))
Chris Lattner30107ed2011-04-17 00:40:24 +00002385 break;
Fangrui Song407659a2018-11-30 23:41:18 +00002386 llvm::APSInt Size = SizeResult.Val.getInt();
2387 llvm::APSInt DstSize = DstSizeResult.Val.getInt();
Chris Lattner30107ed2011-04-17 00:40:24 +00002388 if (Size.ugt(DstSize))
2389 break;
John McCall7f416cc2015-09-08 08:05:57 +00002390 Address Dest = EmitPointerWithAlignment(E->getArg(0));
2391 Address Src = EmitPointerWithAlignment(E->getArg(1));
Chris Lattner30107ed2011-04-17 00:40:24 +00002392 Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
John McCall7f416cc2015-09-08 08:05:57 +00002393 Builder.CreateMemMove(Dest, Src, SizeVal, false);
2394 return RValue::get(Dest.getPointer());
Chris Lattner30107ed2011-04-17 00:40:24 +00002395 }
2396
Eli Friedman7f4933f2009-12-17 00:14:28 +00002397 case Builtin::BImemmove:
Daniel Dunbar327acd72008-07-22 00:26:45 +00002398 case Builtin::BI__builtin_memmove: {
John McCall7f416cc2015-09-08 08:05:57 +00002399 Address Dest = EmitPointerWithAlignment(E->getArg(0));
2400 Address Src = EmitPointerWithAlignment(E->getArg(1));
Mon P Wangcc2ab0c2010-04-04 03:10:52 +00002401 Value *SizeVal = EmitScalarExpr(E->getArg(2));
John McCall7f416cc2015-09-08 08:05:57 +00002402 EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002403 E->getArg(0)->getExprLoc(), FD, 0);
John McCall7f416cc2015-09-08 08:05:57 +00002404 EmitNonNullArgCheck(RValue::get(Src.getPointer()), E->getArg(1)->getType(),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002405 E->getArg(1)->getExprLoc(), FD, 1);
John McCall7f416cc2015-09-08 08:05:57 +00002406 Builder.CreateMemMove(Dest, Src, SizeVal, false);
2407 return RValue::get(Dest.getPointer());
Daniel Dunbar327acd72008-07-22 00:26:45 +00002408 }
Eli Friedman7f4933f2009-12-17 00:14:28 +00002409 case Builtin::BImemset:
Daniel Dunbar327acd72008-07-22 00:26:45 +00002410 case Builtin::BI__builtin_memset: {
John McCall7f416cc2015-09-08 08:05:57 +00002411 Address Dest = EmitPointerWithAlignment(E->getArg(0));
Benjamin Krameracc6b4e2010-12-30 00:13:21 +00002412 Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
2413 Builder.getInt8Ty());
Mon P Wangcc2ab0c2010-04-04 03:10:52 +00002414 Value *SizeVal = EmitScalarExpr(E->getArg(2));
John McCall7f416cc2015-09-08 08:05:57 +00002415 EmitNonNullArgCheck(RValue::get(Dest.getPointer()), E->getArg(0)->getType(),
Nuno Lopes1ba2d782015-05-30 16:11:40 +00002416 E->getArg(0)->getExprLoc(), FD, 0);
John McCall7f416cc2015-09-08 08:05:57 +00002417 Builder.CreateMemSet(Dest, ByteVal, SizeVal, false);
2418 return RValue::get(Dest.getPointer());
Eli Friedmana3a40682008-05-19 23:27:48 +00002419 }
Chris Lattner30107ed2011-04-17 00:40:24 +00002420 case Builtin::BI__builtin___memset_chk: {
Sylvestre Ledru33b5baf2012-09-27 10:16:10 +00002421 // fold __builtin_memset_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
Fangrui Song407659a2018-11-30 23:41:18 +00002422 Expr::EvalResult SizeResult, DstSizeResult;
2423 if (!E->getArg(2)->EvaluateAsInt(SizeResult, CGM.getContext()) ||
2424 !E->getArg(3)->EvaluateAsInt(DstSizeResult, CGM.getContext()))
Chris Lattner30107ed2011-04-17 00:40:24 +00002425 break;
Fangrui Song407659a2018-11-30 23:41:18 +00002426 llvm::APSInt Size = SizeResult.Val.getInt();
2427 llvm::APSInt DstSize = DstSizeResult.Val.getInt();
Chris Lattner30107ed2011-04-17 00:40:24 +00002428 if (Size.ugt(DstSize))
2429 break;
John McCall7f416cc2015-09-08 08:05:57 +00002430 Address Dest = EmitPointerWithAlignment(E->getArg(0));
Chris Lattner30107ed2011-04-17 00:40:24 +00002431 Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
2432 Builder.getInt8Ty());
2433 Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
John McCall7f416cc2015-09-08 08:05:57 +00002434 Builder.CreateMemSet(Dest, ByteVal, SizeVal, false);
2435 return RValue::get(Dest.getPointer());
Chris Lattner30107ed2011-04-17 00:40:24 +00002436 }
Peter Collingbourne9e31f0a2018-01-24 18:59:58 +00002437 case Builtin::BI__builtin_wmemcmp: {
2438 // The MSVC runtime library does not provide a definition of wmemcmp, so we
2439 // need an inline implementation.
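    // The blocks built below are morally equivalent to this C sketch
    // (illustrative only):
    //   int wmemcmp(const wchar_t *d, const wchar_t *s, size_t n) {
    //     for (; n; --n, ++d, ++s) {
    //       if (*d > *s) return 1;
    //       if (*d < *s) return -1;
    //     }
    //     return 0;
    //   }
    // expressed as explicit basic blocks (wmemcmp.gt/.lt/.next/.exit).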
2440 if (!getTarget().getTriple().isOSMSVCRT())
2441 break;
2442
2443 llvm::Type *WCharTy = ConvertType(getContext().WCharTy);
2444
2445 Value *Dst = EmitScalarExpr(E->getArg(0));
2446 Value *Src = EmitScalarExpr(E->getArg(1));
2447 Value *Size = EmitScalarExpr(E->getArg(2));
2448
2449 BasicBlock *Entry = Builder.GetInsertBlock();
2450 BasicBlock *CmpGT = createBasicBlock("wmemcmp.gt");
2451 BasicBlock *CmpLT = createBasicBlock("wmemcmp.lt");
2452 BasicBlock *Next = createBasicBlock("wmemcmp.next");
2453 BasicBlock *Exit = createBasicBlock("wmemcmp.exit");
2454 Value *SizeEq0 = Builder.CreateICmpEQ(Size, ConstantInt::get(SizeTy, 0));
2455 Builder.CreateCondBr(SizeEq0, Exit, CmpGT);
2456
2457 EmitBlock(CmpGT);
2458 PHINode *DstPhi = Builder.CreatePHI(Dst->getType(), 2);
2459 DstPhi->addIncoming(Dst, Entry);
2460 PHINode *SrcPhi = Builder.CreatePHI(Src->getType(), 2);
2461 SrcPhi->addIncoming(Src, Entry);
2462 PHINode *SizePhi = Builder.CreatePHI(SizeTy, 2);
2463 SizePhi->addIncoming(Size, Entry);
2464 CharUnits WCharAlign =
2465 getContext().getTypeAlignInChars(getContext().WCharTy);
2466 Value *DstCh = Builder.CreateAlignedLoad(WCharTy, DstPhi, WCharAlign);
2467 Value *SrcCh = Builder.CreateAlignedLoad(WCharTy, SrcPhi, WCharAlign);
2468 Value *DstGtSrc = Builder.CreateICmpUGT(DstCh, SrcCh);
2469 Builder.CreateCondBr(DstGtSrc, Exit, CmpLT);
2470
2471 EmitBlock(CmpLT);
2472 Value *DstLtSrc = Builder.CreateICmpULT(DstCh, SrcCh);
2473 Builder.CreateCondBr(DstLtSrc, Exit, Next);
2474
2475 EmitBlock(Next);
2476 Value *NextDst = Builder.CreateConstInBoundsGEP1_32(WCharTy, DstPhi, 1);
2477 Value *NextSrc = Builder.CreateConstInBoundsGEP1_32(WCharTy, SrcPhi, 1);
2478 Value *NextSize = Builder.CreateSub(SizePhi, ConstantInt::get(SizeTy, 1));
2479 Value *NextSizeEq0 =
2480 Builder.CreateICmpEQ(NextSize, ConstantInt::get(SizeTy, 0));
2481 Builder.CreateCondBr(NextSizeEq0, Exit, CmpGT);
2482 DstPhi->addIncoming(NextDst, Next);
2483 SrcPhi->addIncoming(NextSrc, Next);
2484 SizePhi->addIncoming(NextSize, Next);
2485
2486 EmitBlock(Exit);
2487 PHINode *Ret = Builder.CreatePHI(IntTy, 4);
2488 Ret->addIncoming(ConstantInt::get(IntTy, 0), Entry);
2489 Ret->addIncoming(ConstantInt::get(IntTy, 1), CmpGT);
2490 Ret->addIncoming(ConstantInt::get(IntTy, -1), CmpLT);
2491 Ret->addIncoming(ConstantInt::get(IntTy, 0), Next);
2492 return RValue::get(Ret);
2493 }
John McCall515c3c52010-03-03 10:30:05 +00002494 case Builtin::BI__builtin_dwarf_cfa: {
2495 // The offset in bytes from the first argument to the CFA.
2496 //
2497 // Why on earth is this in the frontend? Is there any reason at
2498 // all that the backend can't reasonably determine this while
2499 // lowering llvm.eh.dwarf.cfa()?
2500 //
2501 // TODO: If there's a satisfactory reason, add a target hook for
2502 // this instead of hard-coding 0, which is correct for most targets.
2503 int32_t Offset = 0;
2504
James Y Knight8799cae2019-02-03 21:53:49 +00002505 Function *F = CGM.getIntrinsic(Intrinsic::eh_dwarf_cfa);
Jim Grosbachd3608f42012-09-21 00:18:27 +00002506 return RValue::get(Builder.CreateCall(F,
Chris Lattner5e016ae2010-06-27 07:15:29 +00002507 llvm::ConstantInt::get(Int32Ty, Offset)));
John McCall515c3c52010-03-03 10:30:05 +00002508 }
Eli Friedman53e38bd2008-05-20 08:59:34 +00002509 case Builtin::BI__builtin_return_address: {
John McCallde0fe072017-08-15 21:42:52 +00002510 Value *Depth = ConstantEmitter(*this).emitAbstract(E->getArg(0),
2511 getContext().UnsignedIntTy);
James Y Knight8799cae2019-02-03 21:53:49 +00002512 Function *F = CGM.getIntrinsic(Intrinsic::returnaddress);
Anton Korobeynikov73d50b92009-12-27 14:27:22 +00002513 return RValue::get(Builder.CreateCall(F, Depth));
Eli Friedman53e38bd2008-05-20 08:59:34 +00002514 }
Albert Gutowski397d81b2016-10-13 16:03:42 +00002515 case Builtin::BI_ReturnAddress: {
James Y Knight8799cae2019-02-03 21:53:49 +00002516 Function *F = CGM.getIntrinsic(Intrinsic::returnaddress);
Albert Gutowski397d81b2016-10-13 16:03:42 +00002517 return RValue::get(Builder.CreateCall(F, Builder.getInt32(0)));
2518 }
Eli Friedman53e38bd2008-05-20 08:59:34 +00002519 case Builtin::BI__builtin_frame_address: {
John McCallde0fe072017-08-15 21:42:52 +00002520 Value *Depth = ConstantEmitter(*this).emitAbstract(E->getArg(0),
2521 getContext().UnsignedIntTy);
James Y Knight8799cae2019-02-03 21:53:49 +00002522 Function *F = CGM.getIntrinsic(Intrinsic::frameaddress);
Anton Korobeynikov73d50b92009-12-27 14:27:22 +00002523 return RValue::get(Builder.CreateCall(F, Depth));
Eli Friedman53e38bd2008-05-20 08:59:34 +00002524 }
Eli Friedman5b73b5e2009-05-03 19:23:23 +00002525 case Builtin::BI__builtin_extract_return_addr: {
John McCalld4f4b7f2010-03-03 04:15:11 +00002526 Value *Address = EmitScalarExpr(E->getArg(0));
2527 Value *Result = getTargetHooks().decodeReturnAddress(*this, Address);
2528 return RValue::get(Result);
2529 }
2530 case Builtin::BI__builtin_frob_return_addr: {
2531 Value *Address = EmitScalarExpr(E->getArg(0));
2532 Value *Result = getTargetHooks().encodeReturnAddress(*this, Address);
2533 return RValue::get(Result);
Eli Friedman5b73b5e2009-05-03 19:23:23 +00002534 }
John McCallbeec5a02010-03-06 00:35:14 +00002535 case Builtin::BI__builtin_dwarf_sp_column: {
Chris Lattner2192fe52011-07-18 04:24:23 +00002536 llvm::IntegerType *Ty
John McCallbeec5a02010-03-06 00:35:14 +00002537 = cast<llvm::IntegerType>(ConvertType(E->getType()));
2538 int Column = getTargetHooks().getDwarfEHStackPointer(CGM);
2539 if (Column == -1) {
2540 CGM.ErrorUnsupported(E, "__builtin_dwarf_sp_column");
2541 return RValue::get(llvm::UndefValue::get(Ty));
2542 }
2543 return RValue::get(llvm::ConstantInt::get(Ty, Column, true));
2544 }
2545 case Builtin::BI__builtin_init_dwarf_reg_size_table: {
2546 Value *Address = EmitScalarExpr(E->getArg(0));
2547 if (getTargetHooks().initDwarfEHRegSizeTable(*this, Address))
2548 CGM.ErrorUnsupported(E, "__builtin_init_dwarf_reg_size_table");
2549 return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
2550 }
John McCall66769f82010-03-03 05:38:58 +00002551 case Builtin::BI__builtin_eh_return: {
2552 Value *Int = EmitScalarExpr(E->getArg(0));
2553 Value *Ptr = EmitScalarExpr(E->getArg(1));
2554
Chris Lattner2192fe52011-07-18 04:24:23 +00002555 llvm::IntegerType *IntTy = cast<llvm::IntegerType>(Int->getType());
John McCall66769f82010-03-03 05:38:58 +00002556 assert((IntTy->getBitWidth() == 32 || IntTy->getBitWidth() == 64) &&
2557 "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
James Y Knight8799cae2019-02-03 21:53:49 +00002558 Function *F =
2559 CGM.getIntrinsic(IntTy->getBitWidth() == 32 ? Intrinsic::eh_return_i32
2560 : Intrinsic::eh_return_i64);
David Blaikie43f9bb72015-05-18 22:14:03 +00002561 Builder.CreateCall(F, {Int, Ptr});
John McCall20f6ab82011-01-12 03:41:02 +00002562 Builder.CreateUnreachable();
2563
2564 // We do need to preserve an insertion point.
John McCallad7c5c12011-02-08 08:22:06 +00002565 EmitBlock(createBasicBlock("builtin_eh_return.cont"));
John McCall20f6ab82011-01-12 03:41:02 +00002566
Craig Topper8a13c412014-05-21 05:09:00 +00002567 return RValue::get(nullptr);
John McCall66769f82010-03-03 05:38:58 +00002568 }
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002569 case Builtin::BI__builtin_unwind_init: {
James Y Knight8799cae2019-02-03 21:53:49 +00002570 Function *F = CGM.getIntrinsic(Intrinsic::eh_unwind_init);
David Blaikie4ba525b2015-07-14 17:27:39 +00002571 return RValue::get(Builder.CreateCall(F));
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002572 }
John McCall4b613fa2010-03-02 02:31:24 +00002573 case Builtin::BI__builtin_extend_pointer: {
2574 // Extends a pointer to the size of an _Unwind_Word, which is
John McCallb6cc2c042010-03-02 03:50:12 +00002575 // uint64_t on all platforms. Generally this gets poked into a
2576 // register and eventually used as an address, so if the
2577 // addressing registers are wider than pointers and the platform
2578 // doesn't implicitly ignore high-order bits when doing
2579 // addressing, we need to make sure we zext / sext based on
2580 // the platform's expectations.
John McCall4b613fa2010-03-02 02:31:24 +00002581 //
2582 // See: http://gcc.gnu.org/ml/gcc-bugs/2002-02/msg00237.html
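    // Sketch of the result (illustrative): on a 64-bit target this is a plain
    // ptrtoint to i64; on a 32-bit target the 32-bit value is additionally
    // zero- or sign-extended to i64, depending on what the target hooks
    // request.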
John McCallb6cc2c042010-03-02 03:50:12 +00002583
John McCallb6cc2c042010-03-02 03:50:12 +00002584 // Cast the pointer to intptr_t.
John McCall4b613fa2010-03-02 02:31:24 +00002585 Value *Ptr = EmitScalarExpr(E->getArg(0));
John McCallb6cc2c042010-03-02 03:50:12 +00002586 Value *Result = Builder.CreatePtrToInt(Ptr, IntPtrTy, "extend.cast");
2587
2588 // If that's 64 bits, we're done.
2589 if (IntPtrTy->getBitWidth() == 64)
2590 return RValue::get(Result);
2591
2592 // Otherwise, ask the codegen data what to do.
John McCalld4f4b7f2010-03-03 04:15:11 +00002593 if (getTargetHooks().extendPointerWithSExt())
John McCallb6cc2c042010-03-02 03:50:12 +00002594 return RValue::get(Builder.CreateSExt(Result, Int64Ty, "extend.sext"));
2595 else
2596 return RValue::get(Builder.CreateZExt(Result, Int64Ty, "extend.zext"));
John McCall4b613fa2010-03-02 02:31:24 +00002597 }
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002598 case Builtin::BI__builtin_setjmp: {
John McCall02269a62010-05-27 18:47:06 +00002599 // Buffer is a void**.
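    // Layout used here (a sketch of what the code below does): slot 0 of the
    // buffer receives the frame address and slot 2 receives the stack
    // pointer; the remaining bookkeeping is left to llvm.eh.sjlj.setjmp.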
John McCall7f416cc2015-09-08 08:05:57 +00002600 Address Buf = EmitPointerWithAlignment(E->getArg(0));
John McCall02269a62010-05-27 18:47:06 +00002601
2602 // Store the frame pointer to the setjmp buffer.
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002603 Value *FrameAddr =
John McCall02269a62010-05-27 18:47:06 +00002604 Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
Chris Lattner5e016ae2010-06-27 07:15:29 +00002605 ConstantInt::get(Int32Ty, 0));
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002606 Builder.CreateStore(FrameAddr, Buf);
John McCall02269a62010-05-27 18:47:06 +00002607
Jim Grosbach4cf59b92010-05-27 23:54:20 +00002608 // Store the stack pointer to the setjmp buffer.
2609 Value *StackAddr =
David Blaikie4ba525b2015-07-14 17:27:39 +00002610 Builder.CreateCall(CGM.getIntrinsic(Intrinsic::stacksave));
James Y Knight751fe282019-02-09 22:22:28 +00002611 Address StackSaveSlot = Builder.CreateConstInBoundsGEP(Buf, 2);
Jim Grosbach4cf59b92010-05-27 23:54:20 +00002612 Builder.CreateStore(StackAddr, StackSaveSlot);
2613
John McCall02269a62010-05-27 18:47:06 +00002614 // Call LLVM's EH setjmp, which is lightweight.
James Y Knight8799cae2019-02-03 21:53:49 +00002615 Function *F = CGM.getIntrinsic(Intrinsic::eh_sjlj_setjmp);
John McCallad7c5c12011-02-08 08:22:06 +00002616 Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
John McCall7f416cc2015-09-08 08:05:57 +00002617 return RValue::get(Builder.CreateCall(F, Buf.getPointer()));
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002618 }
2619 case Builtin::BI__builtin_longjmp: {
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002620 Value *Buf = EmitScalarExpr(E->getArg(0));
John McCallad7c5c12011-02-08 08:22:06 +00002621 Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
John McCall02269a62010-05-27 18:47:06 +00002622
2623 // Call LLVM's EH longjmp, which is lightweight.
2624 Builder.CreateCall(CGM.getIntrinsic(Intrinsic::eh_sjlj_longjmp), Buf);
2625
John McCall20f6ab82011-01-12 03:41:02 +00002626 // longjmp doesn't return; mark this as unreachable.
2627 Builder.CreateUnreachable();
2628
2629 // We do need to preserve an insertion point.
John McCallad7c5c12011-02-08 08:22:06 +00002630 EmitBlock(createBasicBlock("longjmp.cont"));
John McCall20f6ab82011-01-12 03:41:02 +00002631
Craig Topper8a13c412014-05-21 05:09:00 +00002632 return RValue::get(nullptr);
Eli Friedmancb9d07c2009-06-02 09:37:50 +00002633 }
Eric Fiselier26187502018-12-14 21:11:28 +00002634 case Builtin::BI__builtin_launder: {
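    // Only wrap the pointer in llvm.launder.invariant.group when the pointee
    // type actually needs it (roughly: vptr-carrying types under
    // -fstrict-vtable-pointers); otherwise the argument is returned as-is.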
2635 const Expr *Arg = E->getArg(0);
2636 QualType ArgTy = Arg->getType()->getPointeeType();
2637 Value *Ptr = EmitScalarExpr(Arg);
2638 if (TypeRequiresBuiltinLaunder(CGM, ArgTy))
2639 Ptr = Builder.CreateLaunderInvariantGroup(Ptr);
2640
2641 return RValue::get(Ptr);
2642 }
Mon P Wangb84407d2008-05-09 22:40:52 +00002643 case Builtin::BI__sync_fetch_and_add:
Mon P Wangb84407d2008-05-09 22:40:52 +00002644 case Builtin::BI__sync_fetch_and_sub:
Chris Lattnerdc046542009-05-08 06:58:22 +00002645 case Builtin::BI__sync_fetch_and_or:
2646 case Builtin::BI__sync_fetch_and_and:
2647 case Builtin::BI__sync_fetch_and_xor:
Hal Finkeld2208b52014-10-02 20:53:50 +00002648 case Builtin::BI__sync_fetch_and_nand:
Chris Lattnerdc046542009-05-08 06:58:22 +00002649 case Builtin::BI__sync_add_and_fetch:
2650 case Builtin::BI__sync_sub_and_fetch:
2651 case Builtin::BI__sync_and_and_fetch:
2652 case Builtin::BI__sync_or_and_fetch:
2653 case Builtin::BI__sync_xor_and_fetch:
Hal Finkeld2208b52014-10-02 20:53:50 +00002654 case Builtin::BI__sync_nand_and_fetch:
Chris Lattnerdc046542009-05-08 06:58:22 +00002655 case Builtin::BI__sync_val_compare_and_swap:
2656 case Builtin::BI__sync_bool_compare_and_swap:
2657 case Builtin::BI__sync_lock_test_and_set:
2658 case Builtin::BI__sync_lock_release:
Chris Lattner9cb59fa2011-04-09 03:57:26 +00002659 case Builtin::BI__sync_swap:
David Blaikie83d382b2011-09-23 05:06:16 +00002660 llvm_unreachable("Shouldn't make it through sema");
Chris Lattnerdc046542009-05-08 06:58:22 +00002661 case Builtin::BI__sync_fetch_and_add_1:
2662 case Builtin::BI__sync_fetch_and_add_2:
2663 case Builtin::BI__sync_fetch_and_add_4:
2664 case Builtin::BI__sync_fetch_and_add_8:
2665 case Builtin::BI__sync_fetch_and_add_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002666 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Add, E);
Chris Lattnerdc046542009-05-08 06:58:22 +00002667 case Builtin::BI__sync_fetch_and_sub_1:
2668 case Builtin::BI__sync_fetch_and_sub_2:
2669 case Builtin::BI__sync_fetch_and_sub_4:
2670 case Builtin::BI__sync_fetch_and_sub_8:
2671 case Builtin::BI__sync_fetch_and_sub_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002672 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Sub, E);
Chris Lattnerdc046542009-05-08 06:58:22 +00002673 case Builtin::BI__sync_fetch_and_or_1:
2674 case Builtin::BI__sync_fetch_and_or_2:
2675 case Builtin::BI__sync_fetch_and_or_4:
2676 case Builtin::BI__sync_fetch_and_or_8:
2677 case Builtin::BI__sync_fetch_and_or_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002678 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Or, E);
Chris Lattnerdc046542009-05-08 06:58:22 +00002679 case Builtin::BI__sync_fetch_and_and_1:
2680 case Builtin::BI__sync_fetch_and_and_2:
2681 case Builtin::BI__sync_fetch_and_and_4:
2682 case Builtin::BI__sync_fetch_and_and_8:
2683 case Builtin::BI__sync_fetch_and_and_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002684 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::And, E);
Chris Lattnerdc046542009-05-08 06:58:22 +00002685 case Builtin::BI__sync_fetch_and_xor_1:
2686 case Builtin::BI__sync_fetch_and_xor_2:
2687 case Builtin::BI__sync_fetch_and_xor_4:
2688 case Builtin::BI__sync_fetch_and_xor_8:
2689 case Builtin::BI__sync_fetch_and_xor_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002690 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xor, E);
Hal Finkeld2208b52014-10-02 20:53:50 +00002691 case Builtin::BI__sync_fetch_and_nand_1:
2692 case Builtin::BI__sync_fetch_and_nand_2:
2693 case Builtin::BI__sync_fetch_and_nand_4:
2694 case Builtin::BI__sync_fetch_and_nand_8:
2695 case Builtin::BI__sync_fetch_and_nand_16:
2696 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Nand, E);
Mike Stump11289f42009-09-09 15:08:12 +00002697
Chris Lattnerdc046542009-05-08 06:58:22 +00002698 // Clang extensions: not overloaded yet.
Mon P Wangb84407d2008-05-09 22:40:52 +00002699 case Builtin::BI__sync_fetch_and_min:
Eli Friedmane9f81132011-09-07 01:41:24 +00002700 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Min, E);
Mon P Wangb84407d2008-05-09 22:40:52 +00002701 case Builtin::BI__sync_fetch_and_max:
Eli Friedmane9f81132011-09-07 01:41:24 +00002702 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Max, E);
Mon P Wangb84407d2008-05-09 22:40:52 +00002703 case Builtin::BI__sync_fetch_and_umin:
Eli Friedmane9f81132011-09-07 01:41:24 +00002704 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMin, E);
Mon P Wangb84407d2008-05-09 22:40:52 +00002705 case Builtin::BI__sync_fetch_and_umax:
Eli Friedmane9f81132011-09-07 01:41:24 +00002706 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMax, E);
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002707
Chris Lattnerdc046542009-05-08 06:58:22 +00002708 case Builtin::BI__sync_add_and_fetch_1:
2709 case Builtin::BI__sync_add_and_fetch_2:
2710 case Builtin::BI__sync_add_and_fetch_4:
2711 case Builtin::BI__sync_add_and_fetch_8:
2712 case Builtin::BI__sync_add_and_fetch_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002713 return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Add, E,
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002714 llvm::Instruction::Add);
Chris Lattnerdc046542009-05-08 06:58:22 +00002715 case Builtin::BI__sync_sub_and_fetch_1:
2716 case Builtin::BI__sync_sub_and_fetch_2:
2717 case Builtin::BI__sync_sub_and_fetch_4:
2718 case Builtin::BI__sync_sub_and_fetch_8:
2719 case Builtin::BI__sync_sub_and_fetch_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002720 return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Sub, E,
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002721 llvm::Instruction::Sub);
Chris Lattnerdc046542009-05-08 06:58:22 +00002722 case Builtin::BI__sync_and_and_fetch_1:
2723 case Builtin::BI__sync_and_and_fetch_2:
2724 case Builtin::BI__sync_and_and_fetch_4:
2725 case Builtin::BI__sync_and_and_fetch_8:
2726 case Builtin::BI__sync_and_and_fetch_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002727 return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::And, E,
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002728 llvm::Instruction::And);
Chris Lattnerdc046542009-05-08 06:58:22 +00002729 case Builtin::BI__sync_or_and_fetch_1:
2730 case Builtin::BI__sync_or_and_fetch_2:
2731 case Builtin::BI__sync_or_and_fetch_4:
2732 case Builtin::BI__sync_or_and_fetch_8:
2733 case Builtin::BI__sync_or_and_fetch_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002734 return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Or, E,
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002735 llvm::Instruction::Or);
Chris Lattnerdc046542009-05-08 06:58:22 +00002736 case Builtin::BI__sync_xor_and_fetch_1:
2737 case Builtin::BI__sync_xor_and_fetch_2:
2738 case Builtin::BI__sync_xor_and_fetch_4:
2739 case Builtin::BI__sync_xor_and_fetch_8:
2740 case Builtin::BI__sync_xor_and_fetch_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002741 return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Xor, E,
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002742 llvm::Instruction::Xor);
Hal Finkeld2208b52014-10-02 20:53:50 +00002743 case Builtin::BI__sync_nand_and_fetch_1:
2744 case Builtin::BI__sync_nand_and_fetch_2:
2745 case Builtin::BI__sync_nand_and_fetch_4:
2746 case Builtin::BI__sync_nand_and_fetch_8:
2747 case Builtin::BI__sync_nand_and_fetch_16:
2748 return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Nand, E,
2749 llvm::Instruction::And, true);
Mike Stump11289f42009-09-09 15:08:12 +00002750
Chris Lattnerdc046542009-05-08 06:58:22 +00002751 case Builtin::BI__sync_val_compare_and_swap_1:
2752 case Builtin::BI__sync_val_compare_and_swap_2:
2753 case Builtin::BI__sync_val_compare_and_swap_4:
2754 case Builtin::BI__sync_val_compare_and_swap_8:
Artem Belevichd21e5c62015-06-25 18:29:42 +00002755 case Builtin::BI__sync_val_compare_and_swap_16:
2756 return RValue::get(MakeAtomicCmpXchgValue(*this, E, false));
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002757
Chris Lattnerdc046542009-05-08 06:58:22 +00002758 case Builtin::BI__sync_bool_compare_and_swap_1:
2759 case Builtin::BI__sync_bool_compare_and_swap_2:
2760 case Builtin::BI__sync_bool_compare_and_swap_4:
2761 case Builtin::BI__sync_bool_compare_and_swap_8:
Artem Belevichd21e5c62015-06-25 18:29:42 +00002762 case Builtin::BI__sync_bool_compare_and_swap_16:
2763 return RValue::get(MakeAtomicCmpXchgValue(*this, E, true));
Daniel Dunbar4fab57d2009-04-07 00:55:51 +00002764
Chris Lattner9cb59fa2011-04-09 03:57:26 +00002765 case Builtin::BI__sync_swap_1:
2766 case Builtin::BI__sync_swap_2:
2767 case Builtin::BI__sync_swap_4:
2768 case Builtin::BI__sync_swap_8:
2769 case Builtin::BI__sync_swap_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002770 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
Chris Lattner9cb59fa2011-04-09 03:57:26 +00002771
Chris Lattnerdc046542009-05-08 06:58:22 +00002772 case Builtin::BI__sync_lock_test_and_set_1:
2773 case Builtin::BI__sync_lock_test_and_set_2:
2774 case Builtin::BI__sync_lock_test_and_set_4:
2775 case Builtin::BI__sync_lock_test_and_set_8:
2776 case Builtin::BI__sync_lock_test_and_set_16:
Eli Friedmane9f81132011-09-07 01:41:24 +00002777 return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
Daniel Dunbar4ff562d2010-03-20 07:04:11 +00002778
Chris Lattnerdc046542009-05-08 06:58:22 +00002779 case Builtin::BI__sync_lock_release_1:
2780 case Builtin::BI__sync_lock_release_2:
2781 case Builtin::BI__sync_lock_release_4:
2782 case Builtin::BI__sync_lock_release_8:
Chris Lattnerafde2592009-05-13 04:46:13 +00002783 case Builtin::BI__sync_lock_release_16: {
2784 Value *Ptr = EmitScalarExpr(E->getArg(0));
Eli Friedman84d28122011-09-13 22:21:56 +00002785 QualType ElTy = E->getArg(0)->getType()->getPointeeType();
2786 CharUnits StoreSize = getContext().getTypeSizeInChars(ElTy);
Eli Friedmanfefe0d02012-03-16 01:48:04 +00002787 llvm::Type *ITy = llvm::IntegerType::get(getLLVMContext(),
2788 StoreSize.getQuantity() * 8);
2789 Ptr = Builder.CreateBitCast(Ptr, ITy->getPointerTo());
Jim Grosbachd3608f42012-09-21 00:18:27 +00002790 llvm::StoreInst *Store =
John McCall7f416cc2015-09-08 08:05:57 +00002791 Builder.CreateAlignedStore(llvm::Constant::getNullValue(ITy), Ptr,
2792 StoreSize);
JF Bastien92f4ef12016-04-06 17:26:42 +00002793 Store->setAtomic(llvm::AtomicOrdering::Release);
Craig Topper8a13c412014-05-21 05:09:00 +00002794 return RValue::get(nullptr);
Chris Lattnerafde2592009-05-13 04:46:13 +00002795 }
Daniel Dunbar8eb018a2009-02-16 22:43:43 +00002796
Chris Lattnerafde2592009-05-13 04:46:13 +00002797 case Builtin::BI__sync_synchronize: {
Eli Friedmane9f81132011-09-07 01:41:24 +00002798 // We assume this is supposed to correspond to a C++0x-style
2799 // sequentially-consistent fence (i.e. this is only usable for
Alexander Kornienko2a8c18d2018-04-06 15:14:32 +00002800 // synchronization, not device I/O or anything like that). This intrinsic
Jim Grosbachd3608f42012-09-21 00:18:27 +00002801 // is really badly designed in the sense that in theory, there isn't
Eli Friedmane9f81132011-09-07 01:41:24 +00002802 // any way to safely use it... but in practice, it mostly works
2803 // to use it with non-atomic loads and stores to get acquire/release
2804 // semantics.
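    // The lowering itself is a single instruction (illustrative):
    //   fence seq_cst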
JF Bastien92f4ef12016-04-06 17:26:42 +00002805 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent);
Craig Topper8a13c412014-05-21 05:09:00 +00002806 return RValue::get(nullptr);
Chris Lattnerafde2592009-05-13 04:46:13 +00002807 }
Mike Stump11289f42009-09-09 15:08:12 +00002808
Michael Zolotukhin84df1232015-09-08 23:52:33 +00002809 case Builtin::BI__builtin_nontemporal_load:
2810 return RValue::get(EmitNontemporalLoad(*this, E));
2811 case Builtin::BI__builtin_nontemporal_store:
2812 return RValue::get(EmitNontemporalStore(*this, E));
Richard Smith01ba47d2012-04-13 00:45:38 +00002813 case Builtin::BI__c11_atomic_is_lock_free:
2814 case Builtin::BI__atomic_is_lock_free: {
2815 // Call "bool __atomic_is_lock_free(size_t size, void *ptr)". For the
2816 // __c11 builtin, ptr is 0 (indicating a properly-aligned object), since
2817 // _Atomic(T) is always properly-aligned.
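    // Illustrative example: __atomic_is_lock_free(8, p) becomes a call to the
    // runtime function __atomic_is_lock_free with the size and pointer as
    // arguments, while the __c11 form always passes a null pointer (meaning
    // "an object with natural _Atomic alignment").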
2818 const char *LibCallName = "__atomic_is_lock_free";
2819 CallArgList Args;
2820 Args.add(RValue::get(EmitScalarExpr(E->getArg(0))),
2821 getContext().getSizeType());
2822 if (BuiltinID == Builtin::BI__atomic_is_lock_free)
2823 Args.add(RValue::get(EmitScalarExpr(E->getArg(1))),
2824 getContext().VoidPtrTy);
2825 else
2826 Args.add(RValue::get(llvm::Constant::getNullValue(VoidPtrTy)),
2827 getContext().VoidPtrTy);
2828 const CGFunctionInfo &FuncInfo =
John McCallc56a8b32016-03-11 04:30:31 +00002829 CGM.getTypes().arrangeBuiltinFunctionCall(E->getType(), Args);
Richard Smith01ba47d2012-04-13 00:45:38 +00002830 llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FuncInfo);
James Y Knight9871db02019-02-05 16:42:33 +00002831 llvm::FunctionCallee Func = CGM.CreateRuntimeFunction(FTy, LibCallName);
John McCallb92ab1a2016-10-26 23:46:34 +00002832 return EmitCall(FuncInfo, CGCallee::forDirect(Func),
2833 ReturnValueSlot(), Args);
Richard Smith01ba47d2012-04-13 00:45:38 +00002834 }
2835
2836 case Builtin::BI__atomic_test_and_set: {
2837 // Look at the argument type to determine whether this is a volatile
2838 // operation. The parameter type is always volatile.
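    // The lowering is an 8-bit exchange (illustrative):
    //   %old = atomicrmw xchg i8* %ptr, i8 1 <ordering>
    //   %set = icmp ne i8 %old, 0
    // When the ordering is not a compile-time constant, a switch over the
    // five supported orderings picks which atomicrmw to execute.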
2839 QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
2840 bool Volatile =
2841 PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();
2842
2843 Value *Ptr = EmitScalarExpr(E->getArg(0));
Micah Villmowea2fea22012-10-25 15:39:14 +00002844 unsigned AddrSpace = Ptr->getType()->getPointerAddressSpace();
Richard Smith01ba47d2012-04-13 00:45:38 +00002845 Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
2846 Value *NewVal = Builder.getInt8(1);
2847 Value *Order = EmitScalarExpr(E->getArg(1));
2848 if (isa<llvm::ConstantInt>(Order)) {
2849 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
Craig Topper8a13c412014-05-21 05:09:00 +00002850 AtomicRMWInst *Result = nullptr;
Richard Smith01ba47d2012-04-13 00:45:38 +00002851 switch (ord) {
2852 case 0: // memory_order_relaxed
2853 default: // invalid order
JF Bastien92f4ef12016-04-06 17:26:42 +00002854 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
2855 llvm::AtomicOrdering::Monotonic);
Richard Smith01ba47d2012-04-13 00:45:38 +00002856 break;
JF Bastien92f4ef12016-04-06 17:26:42 +00002857 case 1: // memory_order_consume
2858 case 2: // memory_order_acquire
2859 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
2860 llvm::AtomicOrdering::Acquire);
Richard Smith01ba47d2012-04-13 00:45:38 +00002861 break;
JF Bastien92f4ef12016-04-06 17:26:42 +00002862 case 3: // memory_order_release
2863 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
2864 llvm::AtomicOrdering::Release);
Richard Smith01ba47d2012-04-13 00:45:38 +00002865 break;
JF Bastien92f4ef12016-04-06 17:26:42 +00002866 case 4: // memory_order_acq_rel
2867
2869 llvm::AtomicOrdering::AcquireRelease);
Richard Smith01ba47d2012-04-13 00:45:38 +00002870 break;
JF Bastien92f4ef12016-04-06 17:26:42 +00002871 case 5: // memory_order_seq_cst
2872 Result = Builder.CreateAtomicRMW(
2873 llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
2874 llvm::AtomicOrdering::SequentiallyConsistent);
Richard Smith01ba47d2012-04-13 00:45:38 +00002875 break;
2876 }
2877 Result->setVolatile(Volatile);
2878 return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
2879 }
2880
2881 llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
2882
2883 llvm::BasicBlock *BBs[5] = {
2884 createBasicBlock("monotonic", CurFn),
2885 createBasicBlock("acquire", CurFn),
2886 createBasicBlock("release", CurFn),
2887 createBasicBlock("acqrel", CurFn),
2888 createBasicBlock("seqcst", CurFn)
2889 };
2890 llvm::AtomicOrdering Orders[5] = {
JF Bastien92f4ef12016-04-06 17:26:42 +00002891 llvm::AtomicOrdering::Monotonic, llvm::AtomicOrdering::Acquire,
2892 llvm::AtomicOrdering::Release, llvm::AtomicOrdering::AcquireRelease,
2893 llvm::AtomicOrdering::SequentiallyConsistent};
Richard Smith01ba47d2012-04-13 00:45:38 +00002894
2895 Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
2896 llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
2897
2898 Builder.SetInsertPoint(ContBB);
2899 PHINode *Result = Builder.CreatePHI(Int8Ty, 5, "was_set");
2900
2901 for (unsigned i = 0; i < 5; ++i) {
2902 Builder.SetInsertPoint(BBs[i]);
2903 AtomicRMWInst *RMW = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
2904 Ptr, NewVal, Orders[i]);
2905 RMW->setVolatile(Volatile);
2906 Result->addIncoming(RMW, BBs[i]);
2907 Builder.CreateBr(ContBB);
2908 }
2909
2910 SI->addCase(Builder.getInt32(0), BBs[0]);
2911 SI->addCase(Builder.getInt32(1), BBs[1]);
2912 SI->addCase(Builder.getInt32(2), BBs[1]);
2913 SI->addCase(Builder.getInt32(3), BBs[2]);
2914 SI->addCase(Builder.getInt32(4), BBs[3]);
2915 SI->addCase(Builder.getInt32(5), BBs[4]);
2916
2917 Builder.SetInsertPoint(ContBB);
2918 return RValue::get(Builder.CreateIsNotNull(Result, "tobool"));
2919 }
2920
2921 case Builtin::BI__atomic_clear: {
2922 QualType PtrTy = E->getArg(0)->IgnoreImpCasts()->getType();
2923 bool Volatile =
2924 PtrTy->castAs<PointerType>()->getPointeeType().isVolatileQualified();
2925
John McCall7f416cc2015-09-08 08:05:57 +00002926 Address Ptr = EmitPointerWithAlignment(E->getArg(0));
2927 unsigned AddrSpace = Ptr.getPointer()->getType()->getPointerAddressSpace();
Richard Smith01ba47d2012-04-13 00:45:38 +00002928 Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
2929 Value *NewVal = Builder.getInt8(0);
2930 Value *Order = EmitScalarExpr(E->getArg(1));
2931 if (isa<llvm::ConstantInt>(Order)) {
2932 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
2933 StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
Richard Smith01ba47d2012-04-13 00:45:38 +00002934 switch (ord) {
2935 case 0: // memory_order_relaxed
2936 default: // invalid order
JF Bastien92f4ef12016-04-06 17:26:42 +00002937 Store->setOrdering(llvm::AtomicOrdering::Monotonic);
Richard Smith01ba47d2012-04-13 00:45:38 +00002938 break;
2939 case 3: // memory_order_release
JF Bastien92f4ef12016-04-06 17:26:42 +00002940 Store->setOrdering(llvm::AtomicOrdering::Release);
Richard Smith01ba47d2012-04-13 00:45:38 +00002941 break;
2942 case 5: // memory_order_seq_cst
JF Bastien92f4ef12016-04-06 17:26:42 +00002943 Store->setOrdering(llvm::AtomicOrdering::SequentiallyConsistent);
Richard Smith01ba47d2012-04-13 00:45:38 +00002944 break;
2945 }
Craig Topper8a13c412014-05-21 05:09:00 +00002946 return RValue::get(nullptr);
Richard Smith01ba47d2012-04-13 00:45:38 +00002947 }
2948
2949 llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
2950
2951 llvm::BasicBlock *BBs[3] = {
2952 createBasicBlock("monotonic", CurFn),
2953 createBasicBlock("release", CurFn),
2954 createBasicBlock("seqcst", CurFn)
2955 };
2956 llvm::AtomicOrdering Orders[3] = {
JF Bastien92f4ef12016-04-06 17:26:42 +00002957 llvm::AtomicOrdering::Monotonic, llvm::AtomicOrdering::Release,
2958 llvm::AtomicOrdering::SequentiallyConsistent};
Richard Smith01ba47d2012-04-13 00:45:38 +00002959
2960 Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
2961 llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
2962
2963 for (unsigned i = 0; i < 3; ++i) {
2964 Builder.SetInsertPoint(BBs[i]);
2965 StoreInst *Store = Builder.CreateStore(NewVal, Ptr, Volatile);
Richard Smith01ba47d2012-04-13 00:45:38 +00002966 Store->setOrdering(Orders[i]);
2967 Builder.CreateBr(ContBB);
2968 }
2969
2970 SI->addCase(Builder.getInt32(0), BBs[0]);
2971 SI->addCase(Builder.getInt32(3), BBs[1]);
2972 SI->addCase(Builder.getInt32(5), BBs[2]);
2973
2974 Builder.SetInsertPoint(ContBB);
Craig Topper8a13c412014-05-21 05:09:00 +00002975 return RValue::get(nullptr);
Richard Smith01ba47d2012-04-13 00:45:38 +00002976 }
2977
Eli Friedmandf14b3a2011-10-11 02:20:01 +00002978 case Builtin::BI__atomic_thread_fence:
Richard Smithb1e36c62012-04-11 17:55:32 +00002979 case Builtin::BI__atomic_signal_fence:
2980 case Builtin::BI__c11_atomic_thread_fence:
2981 case Builtin::BI__c11_atomic_signal_fence: {
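    // Signal fences only need to order against the current thread, so they
    // use the single-thread sync scope; thread fences use the default
    // (system-wide) scope. The ordering argument then maps onto an LLVM
    // fence, e.g. (illustrative)
    //   __atomic_thread_fence(__ATOMIC_ACQUIRE)  ->  fence acquire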
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00002982 llvm::SyncScope::ID SSID;
Richard Smithb1e36c62012-04-11 17:55:32 +00002983 if (BuiltinID == Builtin::BI__atomic_signal_fence ||
2984 BuiltinID == Builtin::BI__c11_atomic_signal_fence)
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00002985 SSID = llvm::SyncScope::SingleThread;
Eli Friedmandf14b3a2011-10-11 02:20:01 +00002986 else
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00002987 SSID = llvm::SyncScope::System;
Eli Friedmandf14b3a2011-10-11 02:20:01 +00002988 Value *Order = EmitScalarExpr(E->getArg(0));
2989 if (isa<llvm::ConstantInt>(Order)) {
2990 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
2991 switch (ord) {
2992 case 0: // memory_order_relaxed
2993 default: // invalid order
2994 break;
2995 case 1: // memory_order_consume
2996 case 2: // memory_order_acquire
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00002997 Builder.CreateFence(llvm::AtomicOrdering::Acquire, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00002998 break;
2999 case 3: // memory_order_release
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003000 Builder.CreateFence(llvm::AtomicOrdering::Release, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003001 break;
3002 case 4: // memory_order_acq_rel
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003003 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003004 break;
3005 case 5: // memory_order_seq_cst
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003006 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003007 break;
3008 }
Craig Topper8a13c412014-05-21 05:09:00 +00003009 return RValue::get(nullptr);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003010 }
3011
3012 llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
3013 AcquireBB = createBasicBlock("acquire", CurFn);
3014 ReleaseBB = createBasicBlock("release", CurFn);
3015 AcqRelBB = createBasicBlock("acqrel", CurFn);
3016 SeqCstBB = createBasicBlock("seqcst", CurFn);
3017 llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
3018
3019 Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
3020 llvm::SwitchInst *SI = Builder.CreateSwitch(Order, ContBB);
3021
3022 Builder.SetInsertPoint(AcquireBB);
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003023 Builder.CreateFence(llvm::AtomicOrdering::Acquire, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003024 Builder.CreateBr(ContBB);
3025 SI->addCase(Builder.getInt32(1), AcquireBB);
3026 SI->addCase(Builder.getInt32(2), AcquireBB);
3027
3028 Builder.SetInsertPoint(ReleaseBB);
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003029 Builder.CreateFence(llvm::AtomicOrdering::Release, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003030 Builder.CreateBr(ContBB);
3031 SI->addCase(Builder.getInt32(3), ReleaseBB);
3032
3033 Builder.SetInsertPoint(AcqRelBB);
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003034 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003035 Builder.CreateBr(ContBB);
3036 SI->addCase(Builder.getInt32(4), AcqRelBB);
3037
3038 Builder.SetInsertPoint(SeqCstBB);
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +00003039 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent, SSID);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003040 Builder.CreateBr(ContBB);
3041 SI->addCase(Builder.getInt32(5), SeqCstBB);
3042
3043 Builder.SetInsertPoint(ContBB);
Craig Topper8a13c412014-05-21 05:09:00 +00003044 return RValue::get(nullptr);
Eli Friedmandf14b3a2011-10-11 02:20:01 +00003045 }
3046
Eli Friedman99d20f82010-03-06 02:17:52 +00003047 case Builtin::BI__builtin_signbit:
3048 case Builtin::BI__builtin_signbitf:
3049 case Builtin::BI__builtin_signbitl: {
Chandler Carruthc66deaf2015-03-19 22:39:51 +00003050 return RValue::get(
3051 Builder.CreateZExt(EmitSignBit(*this, EmitScalarExpr(E->getArg(0))),
3052 ConvertType(E->getType())));
Eli Friedman99d20f82010-03-06 02:17:52 +00003053 }
Reid Kleckner30701ed2017-09-05 20:27:35 +00003054 case Builtin::BI__annotation: {
3055 // Re-encode each wide string to UTF8 and make an MDString.
3056 SmallVector<Metadata *, 1> Strings;
3057 for (const Expr *Arg : E->arguments()) {
3058 const auto *Str = cast<StringLiteral>(Arg->IgnoreParenCasts());
3059 assert(Str->getCharByteWidth() == 2);
3060 StringRef WideBytes = Str->getBytes();
3061 std::string StrUtf8;
3062 if (!convertUTF16ToUTF8String(
3063 makeArrayRef(WideBytes.data(), WideBytes.size()), StrUtf8)) {
3064 CGM.ErrorUnsupported(E, "non-UTF16 __annotation argument");
3065 continue;
3066 }
3067 Strings.push_back(llvm::MDString::get(getLLVMContext(), StrUtf8));
3068 }
3069
3070 // Build an MDTuple of MDStrings and emit the intrinsic call.
James Y Knight8799cae2019-02-03 21:53:49 +00003071 llvm::Function *F =
3072 CGM.getIntrinsic(llvm::Intrinsic::codeview_annotation, {});
Reid Kleckner30701ed2017-09-05 20:27:35 +00003073 MDTuple *StrTuple = MDTuple::get(getLLVMContext(), Strings);
3074 Builder.CreateCall(F, MetadataAsValue::get(getLLVMContext(), StrTuple));
3075 return RValue::getIgnored();
3076 }
Julien Lerouge5a6b6982011-09-09 22:41:49 +00003077 case Builtin::BI__builtin_annotation: {
3078 llvm::Value *AnnVal = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +00003079 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::annotation,
Julien Lerouge5a6b6982011-09-09 22:41:49 +00003080 AnnVal->getType());
3081
3082 // Get the annotation string, go through casts. Sema requires this to be a
3083 // non-wide string literal, potentially cast, so the cast<> is safe.
3084 const Expr *AnnotationStrExpr = E->getArg(1)->IgnoreParenCasts();
Dmitri Gribenkof8579502013-01-12 19:30:44 +00003085 StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
Julien Lerouge5a6b6982011-09-09 22:41:49 +00003086 return RValue::get(EmitAnnotationCall(F, AnnVal, Str, E->getExprLoc()));
3087 }
Michael Gottesman15343992013-06-18 20:40:40 +00003088 case Builtin::BI__builtin_addcb:
Michael Gottesman54398012013-01-13 02:22:39 +00003089 case Builtin::BI__builtin_addcs:
3090 case Builtin::BI__builtin_addc:
3091 case Builtin::BI__builtin_addcl:
Michael Gottesmana2b5c4b2013-01-14 21:44:30 +00003092 case Builtin::BI__builtin_addcll:
Michael Gottesman15343992013-06-18 20:40:40 +00003093 case Builtin::BI__builtin_subcb:
Michael Gottesmana2b5c4b2013-01-14 21:44:30 +00003094 case Builtin::BI__builtin_subcs:
3095 case Builtin::BI__builtin_subc:
3096 case Builtin::BI__builtin_subcl:
3097 case Builtin::BI__builtin_subcll: {
Michael Gottesman54398012013-01-13 02:22:39 +00003098
3099 // We translate all of these builtins from expressions of the form:
3100 // int x = ..., y = ..., carryin = ..., carryout, result;
3101 // result = __builtin_addc(x, y, carryin, &carryout);
3102 //
3103 // to LLVM IR of the form:
3104 //
3105 // %tmp1 = call {i32, i1} @llvm.uadd.with.overflow.i32(i32 %x, i32 %y)
3106 // %tmpsum1 = extractvalue {i32, i1} %tmp1, 0
3107 // %carry1 = extractvalue {i32, i1} %tmp1, 1
3108 // %tmp2 = call {i32, i1} @llvm.uadd.with.overflow.i32(i32 %tmpsum1,
3109 // i32 %carryin)
3110 // %result = extractvalue {i32, i1} %tmp2, 0
3111 // %carry2 = extractvalue {i32, i1} %tmp2, 1
3112 // %tmp3 = or i1 %carry1, %carry2
3113 // %tmp4 = zext i1 %tmp3 to i32
3114 // store i32 %tmp4, i32* %carryout
3115
3116 // Scalarize our inputs.
3117 llvm::Value *X = EmitScalarExpr(E->getArg(0));
3118 llvm::Value *Y = EmitScalarExpr(E->getArg(1));
3119 llvm::Value *Carryin = EmitScalarExpr(E->getArg(2));
John McCall7f416cc2015-09-08 08:05:57 +00003120 Address CarryOutPtr = EmitPointerWithAlignment(E->getArg(3));
Michael Gottesman54398012013-01-13 02:22:39 +00003121
Michael Gottesmana2b5c4b2013-01-14 21:44:30 +00003122 // Decide if we are lowering to a uadd.with.overflow or usub.with.overflow.
3123 llvm::Intrinsic::ID IntrinsicId;
3124 switch (BuiltinID) {
3125 default: llvm_unreachable("Unknown multiprecision builtin id.");
Michael Gottesman15343992013-06-18 20:40:40 +00003126 case Builtin::BI__builtin_addcb:
Michael Gottesmana2b5c4b2013-01-14 21:44:30 +00003127 case Builtin::BI__builtin_addcs:
3128 case Builtin::BI__builtin_addc:
3129 case Builtin::BI__builtin_addcl:
3130 case Builtin::BI__builtin_addcll:
3131 IntrinsicId = llvm::Intrinsic::uadd_with_overflow;
3132 break;
Michael Gottesman15343992013-06-18 20:40:40 +00003133 case Builtin::BI__builtin_subcb:
Michael Gottesmana2b5c4b2013-01-14 21:44:30 +00003134 case Builtin::BI__builtin_subcs:
3135 case Builtin::BI__builtin_subc:
3136 case Builtin::BI__builtin_subcl:
3137 case Builtin::BI__builtin_subcll:
3138 IntrinsicId = llvm::Intrinsic::usub_with_overflow;
3139 break;
3140 }
Michael Gottesman54398012013-01-13 02:22:39 +00003141
3142 // Construct our resulting LLVM IR expression.
3143 llvm::Value *Carry1;
3144 llvm::Value *Sum1 = EmitOverflowIntrinsic(*this, IntrinsicId,
3145 X, Y, Carry1);
3146 llvm::Value *Carry2;
3147 llvm::Value *Sum2 = EmitOverflowIntrinsic(*this, IntrinsicId,
3148 Sum1, Carryin, Carry2);
3149 llvm::Value *CarryOut = Builder.CreateZExt(Builder.CreateOr(Carry1, Carry2),
3150 X->getType());
John McCall7f416cc2015-09-08 08:05:57 +00003151 Builder.CreateStore(CarryOut, CarryOutPtr);
Michael Gottesman54398012013-01-13 02:22:39 +00003152 return RValue::get(Sum2);
3153 }
John McCall03107a42015-10-29 20:48:01 +00003154
3155 case Builtin::BI__builtin_add_overflow:
3156 case Builtin::BI__builtin_sub_overflow:
3157 case Builtin::BI__builtin_mul_overflow: {
3158 const clang::Expr *LeftArg = E->getArg(0);
3159 const clang::Expr *RightArg = E->getArg(1);
3160 const clang::Expr *ResultArg = E->getArg(2);
3161
3162 clang::QualType ResultQTy =
3163 ResultArg->getType()->castAs<PointerType>()->getPointeeType();
3164
3165 WidthAndSignedness LeftInfo =
3166 getIntegerWidthAndSignedness(CGM.getContext(), LeftArg->getType());
3167 WidthAndSignedness RightInfo =
3168 getIntegerWidthAndSignedness(CGM.getContext(), RightArg->getType());
3169 WidthAndSignedness ResultInfo =
3170 getIntegerWidthAndSignedness(CGM.getContext(), ResultQTy);
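    // Illustrative sketch (not emitted): for a call such as
    //   long long res; bool ov = __builtin_add_overflow((int)a, (unsigned)b, &res);
    // the encompassing type computed below is a signed 64-bit integer, wide
    // enough to represent every value of all three operand/result types.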
Vedant Kumarfa5a0e52017-12-16 01:28:25 +00003171
3172 // Handle mixed-sign multiplication as a special case, because adding
3173 // runtime or backend support for our generic irgen would be too expensive.
3174 if (isSpecialMixedSignMultiply(BuiltinID, LeftInfo, RightInfo, ResultInfo))
3175 return EmitCheckedMixedSignMultiply(*this, LeftArg, LeftInfo, RightArg,
3176 RightInfo, ResultArg, ResultQTy,
3177 ResultInfo);
3178
John McCall03107a42015-10-29 20:48:01 +00003179 WidthAndSignedness EncompassingInfo =
3180 EncompassingIntegerType({LeftInfo, RightInfo, ResultInfo});
3181
3182 llvm::Type *EncompassingLLVMTy =
3183 llvm::IntegerType::get(CGM.getLLVMContext(), EncompassingInfo.Width);
3184
3185 llvm::Type *ResultLLVMTy = CGM.getTypes().ConvertType(ResultQTy);
3186
3187 llvm::Intrinsic::ID IntrinsicId;
3188 switch (BuiltinID) {
3189 default:
3190 llvm_unreachable("Unknown overflow builtin id.");
3191 case Builtin::BI__builtin_add_overflow:
3192 IntrinsicId = EncompassingInfo.Signed
3193 ? llvm::Intrinsic::sadd_with_overflow
3194 : llvm::Intrinsic::uadd_with_overflow;
3195 break;
3196 case Builtin::BI__builtin_sub_overflow:
3197 IntrinsicId = EncompassingInfo.Signed
3198 ? llvm::Intrinsic::ssub_with_overflow
3199 : llvm::Intrinsic::usub_with_overflow;
3200 break;
3201 case Builtin::BI__builtin_mul_overflow:
3202 IntrinsicId = EncompassingInfo.Signed
3203 ? llvm::Intrinsic::smul_with_overflow
3204 : llvm::Intrinsic::umul_with_overflow;
3205 break;
3206 }
3207
3208 llvm::Value *Left = EmitScalarExpr(LeftArg);
3209 llvm::Value *Right = EmitScalarExpr(RightArg);
3210 Address ResultPtr = EmitPointerWithAlignment(ResultArg);
3211
3212 // Extend each operand to the encompassing type.
3213 Left = Builder.CreateIntCast(Left, EncompassingLLVMTy, LeftInfo.Signed);
3214 Right = Builder.CreateIntCast(Right, EncompassingLLVMTy, RightInfo.Signed);
3215
3216 // Perform the operation on the extended values.
3217 llvm::Value *Overflow, *Result;
3218 Result = EmitOverflowIntrinsic(*this, IntrinsicId, Left, Right, Overflow);
3219
3220 if (EncompassingInfo.Width > ResultInfo.Width) {
3221 // The encompassing type is wider than the result type, so we need to
3222 // truncate it.
3223 llvm::Value *ResultTrunc = Builder.CreateTrunc(Result, ResultLLVMTy);
3224
3225 // To see if the truncation caused an overflow, we will extend
3226 // the result and then compare it to the original result.
3227 llvm::Value *ResultTruncExt = Builder.CreateIntCast(
3228 ResultTrunc, EncompassingLLVMTy, ResultInfo.Signed);
3229 llvm::Value *TruncationOverflow =
3230 Builder.CreateICmpNE(Result, ResultTruncExt);
3231
3232 Overflow = Builder.CreateOr(Overflow, TruncationOverflow);
3233 Result = ResultTrunc;
3234 }
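    // For example (sketch): __builtin_add_overflow(int, int, short *) adds in
    // 32 bits via llvm.sadd.with.overflow.i32, truncates the sum to i16, then
    // sign-extends it back; a mismatch with the untruncated sum means the
    // 16-bit result overflowed even if the 32-bit addition did not.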
3235
3236 // Finally, store the result using the pointer.
3237 bool isVolatile =
3238 ResultArg->getType()->getPointeeType().isVolatileQualified();
3239 Builder.CreateStore(EmitToMemory(Result, ResultQTy), ResultPtr, isVolatile);
3240
3241 return RValue::get(Overflow);
3242 }
3243
Michael Gottesman930ecdb2013-06-20 23:28:10 +00003244 case Builtin::BI__builtin_uadd_overflow:
3245 case Builtin::BI__builtin_uaddl_overflow:
3246 case Builtin::BI__builtin_uaddll_overflow:
3247 case Builtin::BI__builtin_usub_overflow:
3248 case Builtin::BI__builtin_usubl_overflow:
3249 case Builtin::BI__builtin_usubll_overflow:
3250 case Builtin::BI__builtin_umul_overflow:
3251 case Builtin::BI__builtin_umull_overflow:
3252 case Builtin::BI__builtin_umulll_overflow:
3253 case Builtin::BI__builtin_sadd_overflow:
3254 case Builtin::BI__builtin_saddl_overflow:
3255 case Builtin::BI__builtin_saddll_overflow:
3256 case Builtin::BI__builtin_ssub_overflow:
3257 case Builtin::BI__builtin_ssubl_overflow:
3258 case Builtin::BI__builtin_ssubll_overflow:
3259 case Builtin::BI__builtin_smul_overflow:
3260 case Builtin::BI__builtin_smull_overflow:
3261 case Builtin::BI__builtin_smulll_overflow: {
3262
3263   // We translate all of these builtins directly to the relevant LLVM IR intrinsic.
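    // For example (sketch, assuming 32-bit int): __builtin_sadd_overflow(a, b, &sum)
    // becomes roughly
    //   %t = call {i32, i1} @llvm.sadd.with.overflow.i32(i32 %a, i32 %b)
    // where the i32 element is stored through &sum and the i1 element is returned.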
3264
3265 // Scalarize our inputs.
3266 llvm::Value *X = EmitScalarExpr(E->getArg(0));
3267 llvm::Value *Y = EmitScalarExpr(E->getArg(1));
John McCall7f416cc2015-09-08 08:05:57 +00003268 Address SumOutPtr = EmitPointerWithAlignment(E->getArg(2));
Michael Gottesman930ecdb2013-06-20 23:28:10 +00003269
3270 // Decide which of the overflow intrinsics we are lowering to:
3271 llvm::Intrinsic::ID IntrinsicId;
3272 switch (BuiltinID) {
John McCall03107a42015-10-29 20:48:01 +00003273 default: llvm_unreachable("Unknown overflow builtin id.");
Michael Gottesman930ecdb2013-06-20 23:28:10 +00003274 case Builtin::BI__builtin_uadd_overflow:
3275 case Builtin::BI__builtin_uaddl_overflow:
3276 case Builtin::BI__builtin_uaddll_overflow:
3277 IntrinsicId = llvm::Intrinsic::uadd_with_overflow;
3278 break;
3279 case Builtin::BI__builtin_usub_overflow:
3280 case Builtin::BI__builtin_usubl_overflow:
3281 case Builtin::BI__builtin_usubll_overflow:
3282 IntrinsicId = llvm::Intrinsic::usub_with_overflow;
3283 break;
3284 case Builtin::BI__builtin_umul_overflow:
3285 case Builtin::BI__builtin_umull_overflow:
3286 case Builtin::BI__builtin_umulll_overflow:
3287 IntrinsicId = llvm::Intrinsic::umul_with_overflow;
3288 break;
3289 case Builtin::BI__builtin_sadd_overflow:
3290 case Builtin::BI__builtin_saddl_overflow:
3291 case Builtin::BI__builtin_saddll_overflow:
3292 IntrinsicId = llvm::Intrinsic::sadd_with_overflow;
3293 break;
3294 case Builtin::BI__builtin_ssub_overflow:
3295 case Builtin::BI__builtin_ssubl_overflow:
3296 case Builtin::BI__builtin_ssubll_overflow:
3297 IntrinsicId = llvm::Intrinsic::ssub_with_overflow;
3298 break;
3299 case Builtin::BI__builtin_smul_overflow:
3300 case Builtin::BI__builtin_smull_overflow:
3301 case Builtin::BI__builtin_smulll_overflow:
3302 IntrinsicId = llvm::Intrinsic::smul_with_overflow;
Simon Pilgrim532de1c2016-06-13 10:05:19 +00003303 break;
3304 }
3305
3306
3307 llvm::Value *Carry;
3308 llvm::Value *Sum = EmitOverflowIntrinsic(*this, IntrinsicId, X, Y, Carry);
3309 Builder.CreateStore(Sum, SumOutPtr);
Michael Gottesman930ecdb2013-06-20 23:28:10 +00003310
3311 return RValue::get(Carry);
3312 }
Richard Smith6cbd65d2013-07-11 02:27:57 +00003313 case Builtin::BI__builtin_addressof:
John McCall7f416cc2015-09-08 08:05:57 +00003314 return RValue::get(EmitLValue(E->getArg(0)).getPointer());
Richard Smith760520b2014-06-03 23:27:44 +00003315 case Builtin::BI__builtin_operator_new:
Eric Fiselierfa752f22018-03-21 19:19:48 +00003316 return EmitBuiltinNewDeleteCall(
3317 E->getCallee()->getType()->castAs<FunctionProtoType>(), E, false);
Richard Smith760520b2014-06-03 23:27:44 +00003318 case Builtin::BI__builtin_operator_delete:
Eric Fiselierfa752f22018-03-21 19:19:48 +00003319 return EmitBuiltinNewDeleteCall(
3320 E->getCallee()->getType()->castAs<FunctionProtoType>(), E, true);
3321
Nico Weber636fc092012-10-13 22:30:41 +00003322 case Builtin::BI__noop:
Reid Klecknered5d4ad2014-07-11 20:22:55 +00003323 // __noop always evaluates to an integer literal zero.
3324 return RValue::get(ConstantInt::get(IntTy, 0));
Peter Collingbournef7706832014-12-12 23:41:25 +00003325 case Builtin::BI__builtin_call_with_static_chain: {
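    // For example (sketch): __builtin_call_with_static_chain(f(x), chain) emits
    // the call to f(x) as usual, additionally passing 'chain' as the static
    // chain value for the callee.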
3326 const CallExpr *Call = cast<CallExpr>(E->getArg(0));
3327 const Expr *Chain = E->getArg(1);
3328 return EmitCall(Call->getCallee()->getType(),
John McCallb92ab1a2016-10-26 23:46:34 +00003329 EmitCallee(Call->getCallee()), Call, ReturnValue,
3330 EmitScalarExpr(Chain));
Peter Collingbournef7706832014-12-12 23:41:25 +00003331 }
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003332 case Builtin::BI_InterlockedExchange8:
3333 case Builtin::BI_InterlockedExchange16:
Saleem Abdulrasool114efe02014-06-18 20:51:10 +00003334 case Builtin::BI_InterlockedExchange:
3335 case Builtin::BI_InterlockedExchangePointer:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003336 return RValue::get(
3337 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange, E));
Mandeep Singh Grang15e0f7fa2018-10-05 19:49:36 +00003338 case Builtin::BI_InterlockedCompareExchangePointer:
3339 case Builtin::BI_InterlockedCompareExchangePointer_nf: {
Saleem Abdulrasool114efe02014-06-18 20:51:10 +00003340 llvm::Type *RTy;
3341 llvm::IntegerType *IntType =
3342 IntegerType::get(getLLVMContext(),
3343 getContext().getTypeSize(E->getType()));
3344 llvm::Type *IntPtrType = IntType->getPointerTo();
3345
3346 llvm::Value *Destination =
3347 Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)), IntPtrType);
3348
3349 llvm::Value *Exchange = EmitScalarExpr(E->getArg(1));
3350 RTy = Exchange->getType();
3351 Exchange = Builder.CreatePtrToInt(Exchange, IntType);
3352
3353 llvm::Value *Comparand =
3354 Builder.CreatePtrToInt(EmitScalarExpr(E->getArg(2)), IntType);
3355
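    // Note (informal): the _nf ("no fence") variant only guarantees atomicity,
    // so it is lowered with monotonic ordering; the plain variant keeps the
    // sequentially consistent ordering of the MSVC intrinsic.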
Mandeep Singh Grang15e0f7fa2018-10-05 19:49:36 +00003356 auto Ordering =
3357 BuiltinID == Builtin::BI_InterlockedCompareExchangePointer_nf ?
3358 AtomicOrdering::Monotonic : AtomicOrdering::SequentiallyConsistent;
3359
3360 auto Result = Builder.CreateAtomicCmpXchg(Destination, Comparand, Exchange,
3361 Ordering, Ordering);
Saleem Abdulrasool114efe02014-06-18 20:51:10 +00003362 Result->setVolatile(true);
3363
3364 return RValue::get(Builder.CreateIntToPtr(Builder.CreateExtractValue(Result,
3365 0),
3366 RTy));
3367 }
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003368 case Builtin::BI_InterlockedCompareExchange8:
3369 case Builtin::BI_InterlockedCompareExchange16:
3370 case Builtin::BI_InterlockedCompareExchange:
Mandeep Singh Grang6b880682018-11-06 00:36:48 +00003371 case Builtin::BI_InterlockedCompareExchange64:
3372 return RValue::get(EmitAtomicCmpXchgForMSIntrin(*this, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003373 case Builtin::BI_InterlockedIncrement16:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003374 case Builtin::BI_InterlockedIncrement:
3375 return RValue::get(
3376 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003377 case Builtin::BI_InterlockedDecrement16:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003378 case Builtin::BI_InterlockedDecrement:
3379 return RValue::get(
3380 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003381 case Builtin::BI_InterlockedAnd8:
3382 case Builtin::BI_InterlockedAnd16:
3383 case Builtin::BI_InterlockedAnd:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003384 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003385 case Builtin::BI_InterlockedExchangeAdd8:
3386 case Builtin::BI_InterlockedExchangeAdd16:
3387 case Builtin::BI_InterlockedExchangeAdd:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003388 return RValue::get(
3389 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003390 case Builtin::BI_InterlockedExchangeSub8:
3391 case Builtin::BI_InterlockedExchangeSub16:
3392 case Builtin::BI_InterlockedExchangeSub:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003393 return RValue::get(
3394 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeSub, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003395 case Builtin::BI_InterlockedOr8:
3396 case Builtin::BI_InterlockedOr16:
3397 case Builtin::BI_InterlockedOr:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003398 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr, E));
Albert Gutowskice7a9a42016-09-13 19:43:33 +00003399 case Builtin::BI_InterlockedXor8:
3400 case Builtin::BI_InterlockedXor16:
3401 case Builtin::BI_InterlockedXor:
Albert Gutowski5e08df02016-10-13 22:35:07 +00003402 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor, E));
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003403
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003404 case Builtin::BI_bittest64:
Reid Kleckner368d52b2018-06-06 01:35:08 +00003405 case Builtin::BI_bittest:
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003406 case Builtin::BI_bittestandcomplement64:
Reid Kleckner368d52b2018-06-06 01:35:08 +00003407 case Builtin::BI_bittestandcomplement:
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003408 case Builtin::BI_bittestandreset64:
Reid Kleckner368d52b2018-06-06 01:35:08 +00003409 case Builtin::BI_bittestandreset:
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003410 case Builtin::BI_bittestandset64:
Reid Kleckner368d52b2018-06-06 01:35:08 +00003411 case Builtin::BI_bittestandset:
3412 case Builtin::BI_interlockedbittestandreset:
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003413 case Builtin::BI_interlockedbittestandreset64:
Reid Kleckner1d9c2492018-06-05 01:33:40 +00003414 case Builtin::BI_interlockedbittestandset64:
Reid Kleckner368d52b2018-06-06 01:35:08 +00003415 case Builtin::BI_interlockedbittestandset:
Reid Kleckneraa46ed92018-06-07 21:39:04 +00003416 case Builtin::BI_interlockedbittestandset_acq:
3417 case Builtin::BI_interlockedbittestandset_rel:
3418 case Builtin::BI_interlockedbittestandset_nf:
3419 case Builtin::BI_interlockedbittestandreset_acq:
3420 case Builtin::BI_interlockedbittestandreset_rel:
3421 case Builtin::BI_interlockedbittestandreset_nf:
3422 return RValue::get(EmitBitTestIntrinsic(*this, BuiltinID, E));
Reid Kleckner1d59f992015-01-22 01:36:17 +00003423
Reid Kleckner73253bd2019-03-28 22:59:09 +00003424 // These builtins exist to emit regular volatile loads and stores not
3425 // affected by the -fms-volatile setting.
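  // For example (sketch): __iso_volatile_load32(p) always emits a plain
  //   %v = load volatile i32, i32* %p
  // even when /volatile:ms (-fms-volatile) would otherwise give volatile
  // accesses acquire/release atomic semantics.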
3426 case Builtin::BI__iso_volatile_load8:
3427 case Builtin::BI__iso_volatile_load16:
3428 case Builtin::BI__iso_volatile_load32:
3429 case Builtin::BI__iso_volatile_load64:
3430 return RValue::get(EmitISOVolatileLoad(*this, E));
3431 case Builtin::BI__iso_volatile_store8:
3432 case Builtin::BI__iso_volatile_store16:
3433 case Builtin::BI__iso_volatile_store32:
3434 case Builtin::BI__iso_volatile_store64:
3435 return RValue::get(EmitISOVolatileStore(*this, E));
3436
Reid Kleckner1d59f992015-01-22 01:36:17 +00003437 case Builtin::BI__exception_code:
3438 case Builtin::BI_exception_code:
3439 return RValue::get(EmitSEHExceptionCode());
3440 case Builtin::BI__exception_info:
3441 case Builtin::BI_exception_info:
3442 return RValue::get(EmitSEHExceptionInfo());
Reid Kleckneraca01db2015-02-04 22:37:07 +00003443 case Builtin::BI__abnormal_termination:
3444 case Builtin::BI_abnormal_termination:
3445 return RValue::get(EmitSEHAbnormalTermination());
Reid Kleckner11c99ed2018-06-06 18:39:47 +00003446 case Builtin::BI_setjmpex:
3447 if (getTarget().getTriple().isOSMSVCRT())
3448 return EmitMSVCRTSetJmp(*this, MSVCSetJmpKind::_setjmpex, E);
3449 break;
3450 case Builtin::BI_setjmp:
David Majnemer310e3a82015-01-29 09:29:21 +00003451 if (getTarget().getTriple().isOSMSVCRT()) {
Reid Kleckner11c99ed2018-06-06 18:39:47 +00003452 if (getTarget().getTriple().getArch() == llvm::Triple::x86)
3453 return EmitMSVCRTSetJmp(*this, MSVCSetJmpKind::_setjmp3, E);
3454 else if (getTarget().getTriple().getArch() == llvm::Triple::aarch64)
3455 return EmitMSVCRTSetJmp(*this, MSVCSetJmpKind::_setjmpex, E);
3456 return EmitMSVCRTSetJmp(*this, MSVCSetJmpKind::_setjmp, E);
David Majnemer310e3a82015-01-29 09:29:21 +00003457 }
David Majnemerc403a1c2015-03-20 17:03:35 +00003458 break;
David Majnemerba3e5ec2015-03-13 18:26:17 +00003459
3460 case Builtin::BI__GetExceptionInfo: {
3461 if (llvm::GlobalVariable *GV =
3462 CGM.getCXXABI().getThrowInfo(FD->getParamDecl(0)->getType()))
3463 return RValue::get(llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy));
3464 break;
3465 }
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003466
Hans Wennborg5c3c51f2017-04-07 16:41:47 +00003467 case Builtin::BI__fastfail:
Reid Kleckner04f9f912017-02-09 18:31:06 +00003468 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::__fastfail, E));
Reid Kleckner04f9f912017-02-09 18:31:06 +00003469
Gor Nishanov97e3b6d2016-10-03 22:44:48 +00003470 case Builtin::BI__builtin_coro_size: {
3471 auto & Context = getContext();
3472 auto SizeTy = Context.getSizeType();
3473 auto T = Builder.getIntNTy(Context.getTypeSize(SizeTy));
James Y Knight8799cae2019-02-03 21:53:49 +00003474 Function *F = CGM.getIntrinsic(Intrinsic::coro_size, T);
Gor Nishanov97e3b6d2016-10-03 22:44:48 +00003475 return RValue::get(Builder.CreateCall(F));
3476 }
3477
3478 case Builtin::BI__builtin_coro_id:
3479 return EmitCoroutineIntrinsic(E, Intrinsic::coro_id);
3480 case Builtin::BI__builtin_coro_promise:
3481 return EmitCoroutineIntrinsic(E, Intrinsic::coro_promise);
3482 case Builtin::BI__builtin_coro_resume:
3483 return EmitCoroutineIntrinsic(E, Intrinsic::coro_resume);
3484 case Builtin::BI__builtin_coro_frame:
3485 return EmitCoroutineIntrinsic(E, Intrinsic::coro_frame);
Gor Nishanov2a78fa52018-04-02 17:35:37 +00003486 case Builtin::BI__builtin_coro_noop:
3487 return EmitCoroutineIntrinsic(E, Intrinsic::coro_noop);
Gor Nishanov97e3b6d2016-10-03 22:44:48 +00003488 case Builtin::BI__builtin_coro_free:
3489 return EmitCoroutineIntrinsic(E, Intrinsic::coro_free);
3490 case Builtin::BI__builtin_coro_destroy:
3491 return EmitCoroutineIntrinsic(E, Intrinsic::coro_destroy);
3492 case Builtin::BI__builtin_coro_done:
3493 return EmitCoroutineIntrinsic(E, Intrinsic::coro_done);
3494 case Builtin::BI__builtin_coro_alloc:
3495 return EmitCoroutineIntrinsic(E, Intrinsic::coro_alloc);
3496 case Builtin::BI__builtin_coro_begin:
3497 return EmitCoroutineIntrinsic(E, Intrinsic::coro_begin);
3498 case Builtin::BI__builtin_coro_end:
3499 return EmitCoroutineIntrinsic(E, Intrinsic::coro_end);
3500 case Builtin::BI__builtin_coro_suspend:
3501 return EmitCoroutineIntrinsic(E, Intrinsic::coro_suspend);
3502 case Builtin::BI__builtin_coro_param:
3503 return EmitCoroutineIntrinsic(E, Intrinsic::coro_param);
3504
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003505 // OpenCL v2.0 s6.13.16.2, Built-in pipe read and write functions
3506 case Builtin::BIread_pipe:
3507 case Builtin::BIwrite_pipe: {
3508 Value *Arg0 = EmitScalarExpr(E->getArg(0)),
3509 *Arg1 = EmitScalarExpr(E->getArg(1));
Alexey Bader465c1892016-09-23 14:20:00 +00003510 CGOpenCLRuntime OpenCLRT(CGM);
3511 Value *PacketSize = OpenCLRT.getPipeElemSize(E->getArg(0));
3512 Value *PacketAlign = OpenCLRT.getPipeElemAlign(E->getArg(0));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003513
3514 // Type of the generic packet parameter.
3515 unsigned GenericAS =
3516 getContext().getTargetAddressSpace(LangAS::opencl_generic);
3517 llvm::Type *I8PTy = llvm::PointerType::get(
3518 llvm::Type::getInt8Ty(getLLVMContext()), GenericAS);
3519
3520     // Determine which overloaded version we should generate the call for.
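    // Sketch of the two-argument form: read_pipe(p, ptr) becomes roughly
    //   call i32 @__read_pipe_2(pipe, i8 addrspace(4)* ptr, i32 size, i32 align)
    // where size/align describe the pipe's packet type.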
3521 if (2U == E->getNumArgs()) {
3522 const char *Name = (BuiltinID == Builtin::BIread_pipe) ? "__read_pipe_2"
3523 : "__write_pipe_2";
3524 // Creating a generic function type to be able to call with any builtin or
3525 // user defined type.
Alexey Bader465c1892016-09-23 14:20:00 +00003526 llvm::Type *ArgTys[] = {Arg0->getType(), I8PTy, Int32Ty, Int32Ty};
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003527 llvm::FunctionType *FTy = llvm::FunctionType::get(
3528 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
3529 Value *BCast = Builder.CreatePointerCast(Arg1, I8PTy);
Alexey Bader465c1892016-09-23 14:20:00 +00003530 return RValue::get(
3531 Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3532 {Arg0, BCast, PacketSize, PacketAlign}));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003533 } else {
3534 assert(4 == E->getNumArgs() &&
3535 "Illegal number of parameters to pipe function");
3536 const char *Name = (BuiltinID == Builtin::BIread_pipe) ? "__read_pipe_4"
3537 : "__write_pipe_4";
3538
Alexey Bader465c1892016-09-23 14:20:00 +00003539 llvm::Type *ArgTys[] = {Arg0->getType(), Arg1->getType(), Int32Ty, I8PTy,
3540 Int32Ty, Int32Ty};
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003541 Value *Arg2 = EmitScalarExpr(E->getArg(2)),
3542 *Arg3 = EmitScalarExpr(E->getArg(3));
3543 llvm::FunctionType *FTy = llvm::FunctionType::get(
3544 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
3545 Value *BCast = Builder.CreatePointerCast(Arg3, I8PTy);
3546 // We know the third argument is an integer type, but we may need to cast
3547 // it to i32.
3548 if (Arg2->getType() != Int32Ty)
3549 Arg2 = Builder.CreateZExtOrTrunc(Arg2, Int32Ty);
3550 return RValue::get(Builder.CreateCall(
Alexey Bader465c1892016-09-23 14:20:00 +00003551 CGM.CreateRuntimeFunction(FTy, Name),
3552 {Arg0, Arg1, Arg2, BCast, PacketSize, PacketAlign}));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003553 }
3554 }
3555   // OpenCL v2.0 s6.13.16, s9.17.3.5 - Built-in pipe reserve read and write
3556 // functions
3557 case Builtin::BIreserve_read_pipe:
3558 case Builtin::BIreserve_write_pipe:
3559 case Builtin::BIwork_group_reserve_read_pipe:
3560 case Builtin::BIwork_group_reserve_write_pipe:
3561 case Builtin::BIsub_group_reserve_read_pipe:
3562 case Builtin::BIsub_group_reserve_write_pipe: {
3563 // Composing the mangled name for the function.
3564 const char *Name;
3565 if (BuiltinID == Builtin::BIreserve_read_pipe)
3566 Name = "__reserve_read_pipe";
3567 else if (BuiltinID == Builtin::BIreserve_write_pipe)
3568 Name = "__reserve_write_pipe";
3569 else if (BuiltinID == Builtin::BIwork_group_reserve_read_pipe)
3570 Name = "__work_group_reserve_read_pipe";
3571 else if (BuiltinID == Builtin::BIwork_group_reserve_write_pipe)
3572 Name = "__work_group_reserve_write_pipe";
3573 else if (BuiltinID == Builtin::BIsub_group_reserve_read_pipe)
3574 Name = "__sub_group_reserve_read_pipe";
3575 else
3576 Name = "__sub_group_reserve_write_pipe";
3577
3578 Value *Arg0 = EmitScalarExpr(E->getArg(0)),
3579 *Arg1 = EmitScalarExpr(E->getArg(1));
3580 llvm::Type *ReservedIDTy = ConvertType(getContext().OCLReserveIDTy);
Alexey Bader465c1892016-09-23 14:20:00 +00003581 CGOpenCLRuntime OpenCLRT(CGM);
3582 Value *PacketSize = OpenCLRT.getPipeElemSize(E->getArg(0));
3583 Value *PacketAlign = OpenCLRT.getPipeElemAlign(E->getArg(0));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003584
3585 // Building the generic function prototype.
Alexey Bader465c1892016-09-23 14:20:00 +00003586 llvm::Type *ArgTys[] = {Arg0->getType(), Int32Ty, Int32Ty, Int32Ty};
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003587 llvm::FunctionType *FTy = llvm::FunctionType::get(
3588 ReservedIDTy, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
3589 // We know the second argument is an integer type, but we may need to cast
3590 // it to i32.
3591 if (Arg1->getType() != Int32Ty)
3592 Arg1 = Builder.CreateZExtOrTrunc(Arg1, Int32Ty);
3593 return RValue::get(
Alexey Bader465c1892016-09-23 14:20:00 +00003594 Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3595 {Arg0, Arg1, PacketSize, PacketAlign}));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003596 }
Anastasia Stulova7f8d6dc2016-07-04 16:07:18 +00003597 // OpenCL v2.0 s6.13.16, s9.17.3.5 - Built-in pipe commit read and write
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003598 // functions
3599 case Builtin::BIcommit_read_pipe:
3600 case Builtin::BIcommit_write_pipe:
3601 case Builtin::BIwork_group_commit_read_pipe:
3602 case Builtin::BIwork_group_commit_write_pipe:
3603 case Builtin::BIsub_group_commit_read_pipe:
3604 case Builtin::BIsub_group_commit_write_pipe: {
3605 const char *Name;
3606 if (BuiltinID == Builtin::BIcommit_read_pipe)
3607 Name = "__commit_read_pipe";
3608 else if (BuiltinID == Builtin::BIcommit_write_pipe)
3609 Name = "__commit_write_pipe";
3610 else if (BuiltinID == Builtin::BIwork_group_commit_read_pipe)
3611 Name = "__work_group_commit_read_pipe";
3612 else if (BuiltinID == Builtin::BIwork_group_commit_write_pipe)
3613 Name = "__work_group_commit_write_pipe";
3614 else if (BuiltinID == Builtin::BIsub_group_commit_read_pipe)
3615 Name = "__sub_group_commit_read_pipe";
3616 else
3617 Name = "__sub_group_commit_write_pipe";
3618
3619 Value *Arg0 = EmitScalarExpr(E->getArg(0)),
3620 *Arg1 = EmitScalarExpr(E->getArg(1));
Alexey Bader465c1892016-09-23 14:20:00 +00003621 CGOpenCLRuntime OpenCLRT(CGM);
3622 Value *PacketSize = OpenCLRT.getPipeElemSize(E->getArg(0));
3623 Value *PacketAlign = OpenCLRT.getPipeElemAlign(E->getArg(0));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003624
3625 // Building the generic function prototype.
Alexey Bader465c1892016-09-23 14:20:00 +00003626 llvm::Type *ArgTys[] = {Arg0->getType(), Arg1->getType(), Int32Ty, Int32Ty};
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003627 llvm::FunctionType *FTy =
3628 llvm::FunctionType::get(llvm::Type::getVoidTy(getLLVMContext()),
3629 llvm::ArrayRef<llvm::Type *>(ArgTys), false);
3630
3631 return RValue::get(
Alexey Bader465c1892016-09-23 14:20:00 +00003632 Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3633 {Arg0, Arg1, PacketSize, PacketAlign}));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003634 }
3635 // OpenCL v2.0 s6.13.16.4 Built-in pipe query functions
3636 case Builtin::BIget_pipe_num_packets:
3637 case Builtin::BIget_pipe_max_packets: {
Sven van Haastregt4700faa2018-04-27 10:37:04 +00003638 const char *BaseName;
3639 const PipeType *PipeTy = E->getArg(0)->getType()->getAs<PipeType>();
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003640 if (BuiltinID == Builtin::BIget_pipe_num_packets)
Sven van Haastregt4700faa2018-04-27 10:37:04 +00003641 BaseName = "__get_pipe_num_packets";
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003642 else
Sven van Haastregt4700faa2018-04-27 10:37:04 +00003643 BaseName = "__get_pipe_max_packets";
3644 auto Name = std::string(BaseName) +
3645 std::string(PipeTy->isReadOnly() ? "_ro" : "_wo");
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003646
3647 // Building the generic function prototype.
3648 Value *Arg0 = EmitScalarExpr(E->getArg(0));
Alexey Bader465c1892016-09-23 14:20:00 +00003649 CGOpenCLRuntime OpenCLRT(CGM);
3650 Value *PacketSize = OpenCLRT.getPipeElemSize(E->getArg(0));
3651 Value *PacketAlign = OpenCLRT.getPipeElemAlign(E->getArg(0));
3652 llvm::Type *ArgTys[] = {Arg0->getType(), Int32Ty, Int32Ty};
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003653 llvm::FunctionType *FTy = llvm::FunctionType::get(
3654 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
3655
Alexey Bader465c1892016-09-23 14:20:00 +00003656 return RValue::get(Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3657 {Arg0, PacketSize, PacketAlign}));
Xiuli Panbb4d8d32016-01-26 04:03:48 +00003658 }
3659
Yaxun Liuf7449a12016-05-20 19:54:38 +00003660 // OpenCL v2.0 s6.13.9 - Address space qualifier functions.
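  // For example (sketch): to_global(p) is lowered to a call to __to_global with
  // the argument cast to a generic i8*, and the returned pointer is cast back
  // to the expected result type.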
3661 case Builtin::BIto_global:
3662 case Builtin::BIto_local:
3663 case Builtin::BIto_private: {
3664 auto Arg0 = EmitScalarExpr(E->getArg(0));
3665 auto NewArgT = llvm::PointerType::get(Int8Ty,
3666 CGM.getContext().getTargetAddressSpace(LangAS::opencl_generic));
3667 auto NewRetT = llvm::PointerType::get(Int8Ty,
3668 CGM.getContext().getTargetAddressSpace(
3669 E->getType()->getPointeeType().getAddressSpace()));
3670 auto FTy = llvm::FunctionType::get(NewRetT, {NewArgT}, false);
3671 llvm::Value *NewArg;
3672 if (Arg0->getType()->getPointerAddressSpace() !=
3673 NewArgT->getPointerAddressSpace())
3674 NewArg = Builder.CreateAddrSpaceCast(Arg0, NewArgT);
3675 else
3676 NewArg = Builder.CreateBitOrPointerCast(Arg0, NewArgT);
Alexey Baderd81623262016-08-04 18:06:27 +00003677 auto NewName = std::string("__") + E->getDirectCallee()->getName().str();
3678 auto NewCall =
3679 Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, NewName), {NewArg});
Yaxun Liuf7449a12016-05-20 19:54:38 +00003680 return RValue::get(Builder.CreateBitOrPointerCast(NewCall,
3681 ConvertType(E->getType())));
3682 }
3683
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003684 // OpenCL v2.0, s6.13.17 - Enqueue kernel function.
3685 // It contains four different overload formats specified in Table 6.13.17.1.
3686 case Builtin::BIenqueue_kernel: {
3687 StringRef Name; // Generated function call name
3688 unsigned NumArgs = E->getNumArgs();
3689
3690 llvm::Type *QueueTy = ConvertType(getContext().OCLQueueTy);
Anastasia Stulovaaf0a7bb2017-01-27 15:11:34 +00003691 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
3692 getContext().getTargetAddressSpace(LangAS::opencl_generic));
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003693
3694 llvm::Value *Queue = EmitScalarExpr(E->getArg(0));
3695 llvm::Value *Flags = EmitScalarExpr(E->getArg(1));
Anastasia Stulova58984e72017-02-16 12:27:47 +00003696 LValue NDRangeL = EmitAggExprToLValue(E->getArg(2));
3697 llvm::Value *Range = NDRangeL.getAddress().getPointer();
3698 llvm::Type *RangeTy = NDRangeL.getAddress().getType();
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003699
3700 if (NumArgs == 4) {
3701 // The most basic form of the call with parameters:
3702 // queue_t, kernel_enqueue_flags_t, ndrange_t, block(void)
3703 Name = "__enqueue_kernel_basic";
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003704 llvm::Type *ArgTys[] = {QueueTy, Int32Ty, RangeTy, GenericVoidPtrTy,
3705 GenericVoidPtrTy};
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003706 llvm::FunctionType *FTy = llvm::FunctionType::get(
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003707 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003708
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003709 auto Info =
3710 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*this, E->getArg(3));
3711 llvm::Value *Kernel =
3712 Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
3713 llvm::Value *Block =
3714 Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003715
Anastasia Stulova58984e72017-02-16 12:27:47 +00003716 AttrBuilder B;
3717 B.addAttribute(Attribute::ByVal);
Reid Klecknerde864822017-03-21 16:57:30 +00003718 llvm::AttributeList ByValAttrSet =
3719 llvm::AttributeList::get(CGM.getModule().getContext(), 3U, B);
Anastasia Stulova58984e72017-02-16 12:27:47 +00003720
3721 auto RTCall =
3722 Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name, ByValAttrSet),
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003723 {Queue, Flags, Range, Kernel, Block});
Anastasia Stulova58984e72017-02-16 12:27:47 +00003724 RTCall->setAttributes(ByValAttrSet);
3725 return RValue::get(RTCall);
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003726 }
3727 assert(NumArgs >= 5 && "Invalid enqueue_kernel signature");
3728
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003729 // Create a temporary array to hold the sizes of local pointer arguments
3730 // for the block. \p First is the position of the first size argument.
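    // For example (sketch): enqueue_kernel(q, flags, ndr,
    //   ^(local int *a, local float *b){ ... }, 256, 1024)
    // passes the trailing local-memory sizes 256 and 1024 through this array.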
Scott Linderf8b3df42018-08-07 15:52:49 +00003731 auto CreateArrayForSizeVar = [=](unsigned First)
3732 -> std::tuple<llvm::Value *, llvm::Value *, llvm::Value *> {
3733 llvm::APInt ArraySize(32, NumArgs - First);
3734 QualType SizeArrayTy = getContext().getConstantArrayType(
3735 getContext().getSizeType(), ArraySize, ArrayType::Normal,
3736 /*IndexTypeQuals=*/0);
3737 auto Tmp = CreateMemTemp(SizeArrayTy, "block_sizes");
3738 llvm::Value *TmpPtr = Tmp.getPointer();
3739 llvm::Value *TmpSize = EmitLifetimeStart(
3740 CGM.getDataLayout().getTypeAllocSize(Tmp.getElementType()), TmpPtr);
3741 llvm::Value *ElemPtr;
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003742 // Each of the following arguments specifies the size of the corresponding
3743 // argument passed to the enqueued block.
3744 auto *Zero = llvm::ConstantInt::get(IntTy, 0);
3745 for (unsigned I = First; I < NumArgs; ++I) {
3746 auto *Index = llvm::ConstantInt::get(IntTy, I - First);
Scott Linderf8b3df42018-08-07 15:52:49 +00003747 auto *GEP = Builder.CreateGEP(TmpPtr, {Zero, Index});
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003748 if (I == First)
Scott Linderf8b3df42018-08-07 15:52:49 +00003749 ElemPtr = GEP;
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003750 auto *V =
3751 Builder.CreateZExtOrTrunc(EmitScalarExpr(E->getArg(I)), SizeTy);
3752 Builder.CreateAlignedStore(
3753 V, GEP, CGM.getDataLayout().getPrefTypeAlignment(SizeTy));
3754 }
Scott Linderf8b3df42018-08-07 15:52:49 +00003755 return std::tie(ElemPtr, TmpSize, TmpPtr);
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003756 };
3757
Yaxun Liu3cab24a2018-05-09 17:07:06 +00003758 // Could have events and/or varargs.
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003759 if (E->getArg(3)->getType()->isBlockPointerType()) {
3760 // No events passed, but has variadic arguments.
Yaxun Liu3cab24a2018-05-09 17:07:06 +00003761 Name = "__enqueue_kernel_varargs";
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003762 auto Info =
3763 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*this, E->getArg(3));
3764 llvm::Value *Kernel =
3765 Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
3766 auto *Block = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
Scott Linderf8b3df42018-08-07 15:52:49 +00003767 llvm::Value *ElemPtr, *TmpSize, *TmpPtr;
3768 std::tie(ElemPtr, TmpSize, TmpPtr) = CreateArrayForSizeVar(4);
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003769
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003770 // Create a vector of the arguments, as well as a constant value to
3771 // express to the runtime the number of variadic arguments.
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003772 std::vector<llvm::Value *> Args = {
3773 Queue, Flags, Range,
3774 Kernel, Block, ConstantInt::get(IntTy, NumArgs - 4),
Scott Linderf8b3df42018-08-07 15:52:49 +00003775 ElemPtr};
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003776 std::vector<llvm::Type *> ArgTys = {
Scott Linderf8b3df42018-08-07 15:52:49 +00003777 QueueTy, IntTy, RangeTy, GenericVoidPtrTy,
3778 GenericVoidPtrTy, IntTy, ElemPtr->getType()};
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003779
3780 llvm::FunctionType *FTy = llvm::FunctionType::get(
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003781 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
Scott Linderf8b3df42018-08-07 15:52:49 +00003782 auto Call =
3783 RValue::get(Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3784 llvm::ArrayRef<llvm::Value *>(Args)));
3785 if (TmpSize)
3786 EmitLifetimeEnd(TmpSize, TmpPtr);
3787 return Call;
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003788 }
3789   // Every remaining form of the call passes event arguments.
3790 if (NumArgs >= 7) {
3791 llvm::Type *EventTy = ConvertType(getContext().OCLClkEventTy);
Alexey Sotkin1b01f972019-04-11 06:18:17 +00003792 llvm::PointerType *EventPtrTy = EventTy->getPointerTo(
Anastasia Stulova2b461202016-11-14 15:34:01 +00003793 CGM.getContext().getTargetAddressSpace(LangAS::opencl_generic));
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003794
Anastasia Stulova0df4ac32016-11-14 17:39:58 +00003795 llvm::Value *NumEvents =
3796 Builder.CreateZExtOrTrunc(EmitScalarExpr(E->getArg(3)), Int32Ty);
Alexey Sotkin1b01f972019-04-11 06:18:17 +00003797
3798 // Since SemaOpenCLBuiltinEnqueueKernel allows fifth and sixth arguments
3799       // to be a null pointer constant (including the `0` literal), we check for
3800       // that case here and emit a null pointer directly.
3801 llvm::Value *EventWaitList = nullptr;
3802 if (E->getArg(4)->isNullPointerConstant(
3803 getContext(), Expr::NPC_ValueDependentIsNotNull)) {
3804 EventWaitList = llvm::ConstantPointerNull::get(EventPtrTy);
3805 } else {
3806 EventWaitList = E->getArg(4)->getType()->isArrayType()
3807 ? EmitArrayToPointerDecay(E->getArg(4)).getPointer()
3808 : EmitScalarExpr(E->getArg(4));
3809 // Convert to generic address space.
3810 EventWaitList = Builder.CreatePointerCast(EventWaitList, EventPtrTy);
3811 }
3812 llvm::Value *EventRet = nullptr;
3813 if (E->getArg(5)->isNullPointerConstant(
3814 getContext(), Expr::NPC_ValueDependentIsNotNull)) {
3815 EventRet = llvm::ConstantPointerNull::get(EventPtrTy);
3816 } else {
3817 EventRet =
3818 Builder.CreatePointerCast(EmitScalarExpr(E->getArg(5)), EventPtrTy);
3819 }
3820
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003821 auto Info =
3822 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*this, E->getArg(6));
3823 llvm::Value *Kernel =
3824 Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
3825 llvm::Value *Block =
3826 Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003827
Anastasia Stulovaaf0a7bb2017-01-27 15:11:34 +00003828 std::vector<llvm::Type *> ArgTys = {
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003829 QueueTy, Int32Ty, RangeTy, Int32Ty,
3830 EventPtrTy, EventPtrTy, GenericVoidPtrTy, GenericVoidPtrTy};
Anastasia Stulova2b461202016-11-14 15:34:01 +00003831
Alexey Sotkin1b01f972019-04-11 06:18:17 +00003832 std::vector<llvm::Value *> Args = {Queue, Flags, Range,
3833 NumEvents, EventWaitList, EventRet,
3834 Kernel, Block};
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003835
3836 if (NumArgs == 7) {
3837 // Has events but no variadics.
3838 Name = "__enqueue_kernel_basic_events";
3839 llvm::FunctionType *FTy = llvm::FunctionType::get(
3840 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
3841 return RValue::get(
3842 Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3843 llvm::ArrayRef<llvm::Value *>(Args)));
3844 }
3845 // Has event info and variadics
3846 // Pass the number of variadics to the runtime function too.
3847 Args.push_back(ConstantInt::get(Int32Ty, NumArgs - 7));
3848 ArgTys.push_back(Int32Ty);
Yaxun Liu3cab24a2018-05-09 17:07:06 +00003849 Name = "__enqueue_kernel_events_varargs";
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003850
Scott Linderf8b3df42018-08-07 15:52:49 +00003851 llvm::Value *ElemPtr, *TmpSize, *TmpPtr;
3852 std::tie(ElemPtr, TmpSize, TmpPtr) = CreateArrayForSizeVar(7);
3853 Args.push_back(ElemPtr);
3854 ArgTys.push_back(ElemPtr->getType());
Anastasia Stulova0df4ac32016-11-14 17:39:58 +00003855
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003856 llvm::FunctionType *FTy = llvm::FunctionType::get(
Yaxun Liu29a5ee32017-09-03 13:52:24 +00003857 Int32Ty, llvm::ArrayRef<llvm::Type *>(ArgTys), false);
Scott Linderf8b3df42018-08-07 15:52:49 +00003858 auto Call =
3859 RValue::get(Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name),
3860 llvm::ArrayRef<llvm::Value *>(Args)));
3861 if (TmpSize)
3862 EmitLifetimeEnd(TmpSize, TmpPtr);
3863 return Call;
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003864 }
Galina Kistanova0872d6c2017-06-03 06:30:46 +00003865 LLVM_FALLTHROUGH;
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003866 }
3867 // OpenCL v2.0 s6.13.17.6 - Kernel query functions need bitcast of block
3868 // parameter.
3869 case Builtin::BIget_kernel_work_group_size: {
Anastasia Stulovaaf0a7bb2017-01-27 15:11:34 +00003870 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
3871 getContext().getTargetAddressSpace(LangAS::opencl_generic));
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003872 auto Info =
3873 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*this, E->getArg(0));
3874 Value *Kernel = Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
3875 Value *Arg = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003876 return RValue::get(Builder.CreateCall(
3877 CGM.CreateRuntimeFunction(
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003878 llvm::FunctionType::get(IntTy, {GenericVoidPtrTy, GenericVoidPtrTy},
3879 false),
Anastasia Stulovaaf0a7bb2017-01-27 15:11:34 +00003880 "__get_kernel_work_group_size_impl"),
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003881 {Kernel, Arg}));
Anastasia Stulovaaf0a7bb2017-01-27 15:11:34 +00003882 }
3883 case Builtin::BIget_kernel_preferred_work_group_size_multiple: {
3884 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
3885 getContext().getTargetAddressSpace(LangAS::opencl_generic));
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003886 auto Info =
3887 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*this, E->getArg(0));
3888 Value *Kernel = Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
3889 Value *Arg = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
Anastasia Stulovaaf0a7bb2017-01-27 15:11:34 +00003890 return RValue::get(Builder.CreateCall(
3891 CGM.CreateRuntimeFunction(
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003892 llvm::FunctionType::get(IntTy, {GenericVoidPtrTy, GenericVoidPtrTy},
3893 false),
Yaxun Liu3cab24a2018-05-09 17:07:06 +00003894 "__get_kernel_preferred_work_group_size_multiple_impl"),
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003895 {Kernel, Arg}));
Anastasia Stulovadb7a31c2016-07-05 11:31:24 +00003896 }
Joey Goulyfa76b492017-08-01 13:27:09 +00003897 case Builtin::BIget_kernel_max_sub_group_size_for_ndrange:
3898 case Builtin::BIget_kernel_sub_group_count_for_ndrange: {
3899 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
3900 getContext().getTargetAddressSpace(LangAS::opencl_generic));
3901 LValue NDRangeL = EmitAggExprToLValue(E->getArg(0));
3902 llvm::Value *NDRange = NDRangeL.getAddress().getPointer();
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003903 auto Info =
3904 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*this, E->getArg(1));
3905 Value *Kernel = Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
3906 Value *Block = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
Joey Goulyfa76b492017-08-01 13:27:09 +00003907 const char *Name =
3908 BuiltinID == Builtin::BIget_kernel_max_sub_group_size_for_ndrange
3909 ? "__get_kernel_max_sub_group_size_for_ndrange_impl"
3910 : "__get_kernel_sub_group_count_for_ndrange_impl";
3911 return RValue::get(Builder.CreateCall(
3912 CGM.CreateRuntimeFunction(
3913 llvm::FunctionType::get(
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003914 IntTy, {NDRange->getType(), GenericVoidPtrTy, GenericVoidPtrTy},
3915 false),
Joey Goulyfa76b492017-08-01 13:27:09 +00003916 Name),
Yaxun Liuc2a87a02017-10-14 12:23:50 +00003917 {NDRange, Kernel, Block}));
Joey Goulyfa76b492017-08-01 13:27:09 +00003918 }
Jan Vesely31ecb4b2017-09-07 19:39:10 +00003919
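  // Sketch of the half-precision helpers below: __builtin_store_half{,f}
  // truncate a double/float value to half and store it, while
  // __builtin_load_half{,f} load a half and widen it to double or float.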
3920 case Builtin::BI__builtin_store_half:
3921 case Builtin::BI__builtin_store_halff: {
3922 Value *Val = EmitScalarExpr(E->getArg(0));
3923 Address Address = EmitPointerWithAlignment(E->getArg(1));
3924 Value *HalfVal = Builder.CreateFPTrunc(Val, Builder.getHalfTy());
3925 return RValue::get(Builder.CreateStore(HalfVal, Address));
3926 }
3927 case Builtin::BI__builtin_load_half: {
3928 Address Address = EmitPointerWithAlignment(E->getArg(0));
3929 Value *HalfVal = Builder.CreateLoad(Address);
3930 return RValue::get(Builder.CreateFPExt(HalfVal, Builder.getDoubleTy()));
3931 }
3932 case Builtin::BI__builtin_load_halff: {
3933 Address Address = EmitPointerWithAlignment(E->getArg(0));
3934 Value *HalfVal = Builder.CreateLoad(Address);
3935 return RValue::get(Builder.CreateFPExt(HalfVal, Builder.getFloatTy()));
3936 }
Justin Lebar3039a592016-01-23 21:28:14 +00003937 case Builtin::BIprintf:
Arpith Chacko Jacobcdda3daa2017-01-29 20:49:31 +00003938 if (getTarget().getTriple().isNVPTX())
3939 return EmitNVPTXDevicePrintfCallExpr(E, ReturnValue);
Matt Arsenault2d933982016-02-27 09:06:18 +00003940 break;
3941 case Builtin::BI__builtin_canonicalize:
3942 case Builtin::BI__builtin_canonicalizef:
3943 case Builtin::BI__builtin_canonicalizel:
3944 return RValue::get(emitUnaryBuiltin(*this, E, Intrinsic::canonicalize));
Marcin Koscielnickia46fade2016-06-16 13:41:54 +00003945
3946 case Builtin::BI__builtin_thread_pointer: {
3947 if (!getContext().getTargetInfo().isTLSSupported())
3948 CGM.ErrorUnsupported(E, "__builtin_thread_pointer");
3949 // Fall through - it's already mapped to the intrinsic by GCCBuiltin.
3950 break;
3951 }
Akira Hatanaka6b103bc2017-10-06 07:12:46 +00003952 case Builtin::BI__builtin_os_log_format:
3953 return emitBuiltinOSLogFormat(*E);
Mehdi Amini06d367c2016-10-24 20:39:34 +00003954
Dean Michael Berris42af6512017-05-09 00:45:40 +00003955 case Builtin::BI__xray_customevent: {
3956 if (!ShouldXRayInstrumentFunction())
3957 return RValue::getIgnored();
Dean Michael Berris488f7c22018-04-13 02:31:58 +00003958
3959 if (!CGM.getCodeGenOpts().XRayInstrumentationBundle.has(
3960 XRayInstrKind::Custom))
3961 return RValue::getIgnored();
3962
Dean Michael Berris1a5b10d2017-11-30 00:04:54 +00003963 if (const auto *XRayAttr = CurFuncDecl->getAttr<XRayInstrumentAttr>())
3964 if (XRayAttr->neverXRayInstrument() && !AlwaysEmitXRayCustomEvents())
Dean Michael Berris42af6512017-05-09 00:45:40 +00003965 return RValue::getIgnored();
Dean Michael Berris1a5b10d2017-11-30 00:04:54 +00003966
Dean Michael Berris42af6512017-05-09 00:45:40 +00003967 Function *F = CGM.getIntrinsic(Intrinsic::xray_customevent);
3968 auto FTy = F->getFunctionType();
3969 auto Arg0 = E->getArg(0);
3970 auto Arg0Val = EmitScalarExpr(Arg0);
3971 auto Arg0Ty = Arg0->getType();
3972 auto PTy0 = FTy->getParamType(0);
3973 if (PTy0 != Arg0Val->getType()) {
3974 if (Arg0Ty->isArrayType())
3975 Arg0Val = EmitArrayToPointerDecay(Arg0).getPointer();
3976 else
3977 Arg0Val = Builder.CreatePointerCast(Arg0Val, PTy0);
3978 }
3979 auto Arg1 = EmitScalarExpr(E->getArg(1));
3980 auto PTy1 = FTy->getParamType(1);
3981 if (PTy1 != Arg1->getType())
3982 Arg1 = Builder.CreateTruncOrBitCast(Arg1, PTy1);
3983 return RValue::get(Builder.CreateCall(F, {Arg0Val, Arg1}));
3984 }
Martin Storsjo022e7822017-07-17 20:49:45 +00003985
Keith Wyssf437e352018-04-17 21:32:43 +00003986 case Builtin::BI__xray_typedevent: {
3987 // TODO: There should be a way to always emit events even if the current
3988 // function is not instrumented. Losing events in a stream can cripple
3989 // a trace.
3990 if (!ShouldXRayInstrumentFunction())
3991 return RValue::getIgnored();
3992
3993 if (!CGM.getCodeGenOpts().XRayInstrumentationBundle.has(
3994 XRayInstrKind::Typed))
3995 return RValue::getIgnored();
3996
3997 if (const auto *XRayAttr = CurFuncDecl->getAttr<XRayInstrumentAttr>())
3998 if (XRayAttr->neverXRayInstrument() && !AlwaysEmitXRayTypedEvents())
3999 return RValue::getIgnored();
4000
4001 Function *F = CGM.getIntrinsic(Intrinsic::xray_typedevent);
4002 auto FTy = F->getFunctionType();
4003 auto Arg0 = EmitScalarExpr(E->getArg(0));
4004 auto PTy0 = FTy->getParamType(0);
4005 if (PTy0 != Arg0->getType())
4006 Arg0 = Builder.CreateTruncOrBitCast(Arg0, PTy0);
4007 auto Arg1 = E->getArg(1);
4008 auto Arg1Val = EmitScalarExpr(Arg1);
4009 auto Arg1Ty = Arg1->getType();
4010 auto PTy1 = FTy->getParamType(1);
4011 if (PTy1 != Arg1Val->getType()) {
4012 if (Arg1Ty->isArrayType())
4013 Arg1Val = EmitArrayToPointerDecay(Arg1).getPointer();
4014 else
4015 Arg1Val = Builder.CreatePointerCast(Arg1Val, PTy1);
4016 }
4017 auto Arg2 = EmitScalarExpr(E->getArg(2));
4018 auto PTy2 = FTy->getParamType(2);
4019 if (PTy2 != Arg2->getType())
4020 Arg2 = Builder.CreateTruncOrBitCast(Arg2, PTy2);
4021 return RValue::get(Builder.CreateCall(F, {Arg0, Arg1Val, Arg2}));
4022 }
4023
Martin Storsjo022e7822017-07-17 20:49:45 +00004024 case Builtin::BI__builtin_ms_va_start:
4025 case Builtin::BI__builtin_ms_va_end:
4026 return RValue::get(
4027 EmitVAStartEnd(EmitMSVAListRef(E->getArg(0)).getPointer(),
4028 BuiltinID == Builtin::BI__builtin_ms_va_start));
4029
4030 case Builtin::BI__builtin_ms_va_copy: {
4031 // Lower this manually. We can't reliably determine whether or not any
4032 // given va_copy() is for a Win64 va_list from the calling convention
4033 // alone, because it's legal to do this from a System V ABI function.
4034 // With opaque pointer types, we won't have enough information in LLVM
4035 // IR to determine this from the argument types, either. Best to do it
4036 // now, while we have enough information.
4037 Address DestAddr = EmitMSVAListRef(E->getArg(0));
4038 Address SrcAddr = EmitMSVAListRef(E->getArg(1));
4039
4040 llvm::Type *BPP = Int8PtrPtrTy;
4041
4042 DestAddr = Address(Builder.CreateBitCast(DestAddr.getPointer(), BPP, "cp"),
4043 DestAddr.getAlignment());
4044 SrcAddr = Address(Builder.CreateBitCast(SrcAddr.getPointer(), BPP, "ap"),
4045 SrcAddr.getAlignment());
4046
4047 Value *ArgPtr = Builder.CreateLoad(SrcAddr, "ap.val");
4048 return RValue::get(Builder.CreateStore(ArgPtr, DestAddr));
4049 }
Nate Begeman6c591322008-05-15 07:38:03 +00004050 }
Mike Stump11289f42009-09-09 15:08:12 +00004051
John McCall30e4efd2011-09-13 23:05:03 +00004052 // If this is an alias for a lib function (e.g. __builtin_sin), emit
4053 // the call using the normal call path, but using the unmangled
4054 // version of the function name.
4055 if (getContext().BuiltinInfo.isLibFunction(BuiltinID))
4056 return emitLibraryCall(*this, FD, E,
4057 CGM.getBuiltinLibFunction(FD, BuiltinID));
Jim Grosbachd3608f42012-09-21 00:18:27 +00004058
John McCall30e4efd2011-09-13 23:05:03 +00004059 // If this is a predefined lib function (e.g. malloc), emit the call
4060 // using exactly the normal call path.
4061 if (getContext().BuiltinInfo.isPredefinedLibFunction(BuiltinID))
John McCallb92ab1a2016-10-26 23:46:34 +00004062 return emitLibraryCall(*this, FD, E,
4063 cast<llvm::Constant>(EmitScalarExpr(E->getCallee())));
Mike Stump11289f42009-09-09 15:08:12 +00004064
Eric Christopher15709992015-10-15 23:47:11 +00004065 // Check that a call to a target specific builtin has the correct target
4066 // features.
4067 // This is down here to avoid non-target specific builtins, however, if
4068 // generic builtins start to require generic target features then we
4069 // can move this up to the beginning of the function.
Eric Christopherc7e79db2015-11-12 00:44:04 +00004070 checkTargetFeatures(E, FD);
Eric Christopher15709992015-10-15 23:47:11 +00004071
Craig Topper74c10e32018-07-09 19:00:16 +00004072 if (unsigned VectorWidth = getContext().BuiltinInfo.getRequiredVectorWidth(BuiltinID))
4073 LargestVectorWidth = std::max(LargestVectorWidth, VectorWidth);
4074
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004075 // See if we have a target specific intrinsic.
Mehdi Amini7186a432016-10-11 19:04:24 +00004076 const char *Name = getContext().BuiltinInfo.getName(BuiltinID);
Daniel Dunbar576d90d2009-08-24 09:54:37 +00004077 Intrinsic::ID IntrinsicID = Intrinsic::not_intrinsic;
Mehdi Aminib7fb1242016-10-01 01:16:22 +00004078 StringRef Prefix =
4079 llvm::Triple::getArchTypePrefix(getTarget().getTriple().getArch());
4080 if (!Prefix.empty()) {
4081 IntrinsicID = Intrinsic::getIntrinsicForGCCBuiltin(Prefix.data(), Name);
Alexander Kornienko2a8c18d2018-04-06 15:14:32 +00004082    // NOTE: we don't need to perform a compatibility flag check here since the
Saleem Abdulrasool96bfda82014-07-04 21:49:39 +00004083    // intrinsics are declared in Builtins*.def via LANGBUILTIN, which filters the
4084    // MS builtins via ALL_MS_LANGUAGES, so they are handled earlier.
4085 if (IntrinsicID == Intrinsic::not_intrinsic)
Mehdi Aminib7fb1242016-10-01 01:16:22 +00004086 IntrinsicID = Intrinsic::getIntrinsicForMSBuiltin(Prefix.data(), Name);
Saleem Abdulrasool96bfda82014-07-04 21:49:39 +00004087 }
Mike Stump11289f42009-09-09 15:08:12 +00004088
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004089 if (IntrinsicID != Intrinsic::not_intrinsic) {
4090 SmallVector<Value*, 16> Args;
Mike Stump11289f42009-09-09 15:08:12 +00004091
Chris Lattner64d7f2a2010-10-02 00:09:12 +00004092 // Find out if any arguments are required to be integer constant
4093 // expressions.
4094 unsigned ICEArguments = 0;
4095 ASTContext::GetBuiltinTypeError Error;
4096 getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
4097 assert(Error == ASTContext::GE_None && "Should not codegen an error");
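    // Illustrative: an argument such as a NEON lane index must be an immediate,
    // so it is constant-folded below and passed to the intrinsic as a
    // ConstantInt rather than emitted as an ordinary scalar expression.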
4098
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004099 Function *F = CGM.getIntrinsic(IntrinsicID);
Chris Lattner2192fe52011-07-18 04:24:23 +00004100 llvm::FunctionType *FTy = F->getFunctionType();
Mike Stump11289f42009-09-09 15:08:12 +00004101
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004102 for (unsigned i = 0, e = E->getNumArgs(); i != e; ++i) {
Chris Lattner64d7f2a2010-10-02 00:09:12 +00004103 Value *ArgValue;
4104 // If this is a normal argument, just emit it as a scalar.
4105 if ((ICEArguments & (1 << i)) == 0) {
4106 ArgValue = EmitScalarExpr(E->getArg(i));
4107 } else {
Jim Grosbachd3608f42012-09-21 00:18:27 +00004108 // If this is required to be a constant, constant fold it so that we
Chris Lattner64d7f2a2010-10-02 00:09:12 +00004109 // know that the generated intrinsic gets a ConstantInt.
4110 llvm::APSInt Result;
4111 bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result,getContext());
4112 assert(IsConst && "Constant arg isn't actually constant?");
4113 (void)IsConst;
John McCallad7c5c12011-02-08 08:22:06 +00004114 ArgValue = llvm::ConstantInt::get(getLLVMContext(), Result);
Chris Lattner64d7f2a2010-10-02 00:09:12 +00004115 }
Mike Stump11289f42009-09-09 15:08:12 +00004116
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004117 // If the intrinsic arg type is different from the builtin arg type
4118 // we need to do a bit cast.
Chris Lattner2192fe52011-07-18 04:24:23 +00004119 llvm::Type *PTy = FTy->getParamType(i);
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004120 if (PTy != ArgValue->getType()) {
Matt Arsenaultc65f9662018-08-02 12:14:28 +00004121 // XXX - vector of pointers?
4122 if (auto *PtrTy = dyn_cast<llvm::PointerType>(PTy)) {
4123 if (PtrTy->getAddressSpace() !=
4124 ArgValue->getType()->getPointerAddressSpace()) {
4125 ArgValue = Builder.CreateAddrSpaceCast(
4126 ArgValue,
4127 ArgValue->getType()->getPointerTo(PtrTy->getAddressSpace()));
4128 }
4129 }
4130
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004131 assert(PTy->canLosslesslyBitCastTo(FTy->getParamType(i)) &&
4132 "Must be able to losslessly bit cast to param");
4133 ArgValue = Builder.CreateBitCast(ArgValue, PTy);
4134 }
Mike Stump11289f42009-09-09 15:08:12 +00004135
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004136 Args.push_back(ArgValue);
4137 }
Mike Stump11289f42009-09-09 15:08:12 +00004138
Jay Foad5bd375a2011-07-15 08:37:34 +00004139 Value *V = Builder.CreateCall(F, Args);
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004140 QualType BuiltinRetType = E->getType();
Mike Stump11289f42009-09-09 15:08:12 +00004141
Chris Lattnerece04092012-02-07 00:39:47 +00004142 llvm::Type *RetTy = VoidTy;
Jim Grosbachd3608f42012-09-21 00:18:27 +00004143 if (!BuiltinRetType->isVoidType())
Chris Lattnerece04092012-02-07 00:39:47 +00004144 RetTy = ConvertType(BuiltinRetType);
Mike Stump11289f42009-09-09 15:08:12 +00004145
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004146 if (RetTy != V->getType()) {
Matt Arsenaultc65f9662018-08-02 12:14:28 +00004147 // XXX - vector of pointers?
4148 if (auto *PtrTy = dyn_cast<llvm::PointerType>(RetTy)) {
4149 if (PtrTy->getAddressSpace() != V->getType()->getPointerAddressSpace()) {
4150 V = Builder.CreateAddrSpaceCast(
4151 V, V->getType()->getPointerTo(PtrTy->getAddressSpace()));
4152 }
4153 }
4154
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004155 assert(V->getType()->canLosslesslyBitCastTo(RetTy) &&
4156 "Must be able to losslessly bit cast result type");
4157 V = Builder.CreateBitCast(V, RetTy);
4158 }
Mike Stump11289f42009-09-09 15:08:12 +00004159
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004160 return RValue::get(V);
4161 }
Mike Stump11289f42009-09-09 15:08:12 +00004162
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004163 // See if we have a target specific builtin that needs to be lowered.
Daniel Dunbareca513d2008-10-10 00:24:54 +00004164 if (Value *V = EmitTargetBuiltinExpr(BuiltinID, E))
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004165 return RValue::get(V);
Mike Stump11289f42009-09-09 15:08:12 +00004166
Daniel Dunbara7c8cf62008-08-16 00:56:44 +00004167 ErrorUnsupported(E, "builtin function");
Mike Stump11289f42009-09-09 15:08:12 +00004168
Chris Lattner9a8d1d92008-06-30 18:32:54 +00004169 // Unknown builtin, for now just dump it out and return undef.
John McCall47fb9502013-03-07 21:37:08 +00004170 return GetUndefRValue(E->getType());
Mike Stump11289f42009-09-09 15:08:12 +00004171}
Anders Carlsson895af082007-12-09 23:17:02 +00004172
Artem Belevichb5bc9232015-09-22 17:23:22 +00004173static Value *EmitTargetArchBuiltinExpr(CodeGenFunction *CGF,
4174 unsigned BuiltinID, const CallExpr *E,
4175 llvm::Triple::ArchType Arch) {
4176 switch (Arch) {
Chris Lattner5cc15e02010-03-03 19:03:45 +00004177 case llvm::Triple::arm:
Christian Pirkerf01cd6f2014-03-28 14:40:46 +00004178 case llvm::Triple::armeb:
Chris Lattner5cc15e02010-03-03 19:03:45 +00004179 case llvm::Triple::thumb:
Christian Pirkerf01cd6f2014-03-28 14:40:46 +00004180 case llvm::Triple::thumbeb:
Sjoerd Meijer95da8752018-03-13 19:38:56 +00004181 return CGF->EmitARMBuiltinExpr(BuiltinID, E, Arch);
Tim Northover25e8a672014-05-24 12:51:25 +00004182 case llvm::Triple::aarch64:
4183 case llvm::Triple::aarch64_be:
Sjoerd Meijer95da8752018-03-13 19:38:56 +00004184 return CGF->EmitAArch64BuiltinExpr(BuiltinID, E, Arch);
Daniel Dunbar576d90d2009-08-24 09:54:37 +00004185 case llvm::Triple::x86:
4186 case llvm::Triple::x86_64:
Artem Belevichb5bc9232015-09-22 17:23:22 +00004187 return CGF->EmitX86BuiltinExpr(BuiltinID, E);
Daniel Dunbar576d90d2009-08-24 09:54:37 +00004188 case llvm::Triple::ppc:
4189 case llvm::Triple::ppc64:
Bill Schmidt778d3872013-07-26 01:36:11 +00004190 case llvm::Triple::ppc64le:
Artem Belevichb5bc9232015-09-22 17:23:22 +00004191 return CGF->EmitPPCBuiltinExpr(BuiltinID, E);
Matt Arsenault56f008d2014-06-24 20:45:01 +00004192 case llvm::Triple::r600:
Tom Stellardd8e38a32015-01-06 20:34:47 +00004193 case llvm::Triple::amdgcn:
Artem Belevichb5bc9232015-09-22 17:23:22 +00004194 return CGF->EmitAMDGPUBuiltinExpr(BuiltinID, E);
Ulrich Weigand3a610eb2015-04-01 12:54:25 +00004195 case llvm::Triple::systemz:
Artem Belevichb5bc9232015-09-22 17:23:22 +00004196 return CGF->EmitSystemZBuiltinExpr(BuiltinID, E);
Artem Belevichd21e5c62015-06-25 18:29:42 +00004197 case llvm::Triple::nvptx:
4198 case llvm::Triple::nvptx64:
Artem Belevichb5bc9232015-09-22 17:23:22 +00004199 return CGF->EmitNVPTXBuiltinExpr(BuiltinID, E);
Dan Gohmanc2853072015-09-03 22:51:53 +00004200 case llvm::Triple::wasm32:
4201 case llvm::Triple::wasm64:
Artem Belevichb5bc9232015-09-22 17:23:22 +00004202 return CGF->EmitWebAssemblyBuiltinExpr(BuiltinID, E);
Krzysztof Parzyszek5a655832017-12-13 19:56:03 +00004203 case llvm::Triple::hexagon:
4204 return CGF->EmitHexagonBuiltinExpr(BuiltinID, E);
Daniel Dunbar576d90d2009-08-24 09:54:37 +00004205 default:
Craig Topper8a13c412014-05-21 05:09:00 +00004206 return nullptr;
Daniel Dunbar576d90d2009-08-24 09:54:37 +00004207 }
Daniel Dunbareca513d2008-10-10 00:24:54 +00004208}
4209
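// Entry point for target-specific builtins. If the builtin belongs to the
// auxiliary target (as can happen when offloading, e.g. under CUDA), dispatch
// on its aux builtin ID and the aux target's architecture instead.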
Artem Belevichb5bc9232015-09-22 17:23:22 +00004210Value *CodeGenFunction::EmitTargetBuiltinExpr(unsigned BuiltinID,
4211 const CallExpr *E) {
4212 if (getContext().BuiltinInfo.isAuxBuiltinID(BuiltinID)) {
4213 assert(getContext().getAuxTargetInfo() && "Missing aux target info");
4214 return EmitTargetArchBuiltinExpr(
4215 this, getContext().BuiltinInfo.getAuxBuiltinID(BuiltinID), E,
4216 getContext().getAuxTargetInfo()->getTriple().getArch());
4217 }
4218
4219 return EmitTargetArchBuiltinExpr(this, BuiltinID, E,
4220 getTarget().getTriple().getArch());
4221}
4222
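// Translate a NeonTypeFlags value into the corresponding LLVM vector type.
// The quad bit doubles the lane count (e.g. Int32 yields <2 x i32> for a
// 64-bit register and <4 x i32> for a 128-bit one), V1Ty forces a single
// lane, and half-float vectors are modelled as <N x i16> on targets without
// a legal half type.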
Chris Lattnerece04092012-02-07 00:39:47 +00004223static llvm::VectorType *GetNeonType(CodeGenFunction *CGF,
Jiangning Liu036f16d2013-09-24 02:48:06 +00004224 NeonTypeFlags TypeFlags,
Sjoerd Meijer87793e72018-03-19 13:22:49 +00004225 bool HasLegalHalfType=true,
Jiangning Liu036f16d2013-09-24 02:48:06 +00004226 bool V1Ty=false) {
NAKAMURA Takumidabda6b2011-11-08 03:27:04 +00004227 int IsQuad = TypeFlags.isQuad();
4228 switch (TypeFlags.getEltType()) {
Bob Wilson98bc98c2011-11-08 01:16:11 +00004229 case NeonTypeFlags::Int8:
4230 case NeonTypeFlags::Poly8:
Jiangning Liu036f16d2013-09-24 02:48:06 +00004231 return llvm::VectorType::get(CGF->Int8Ty, V1Ty ? 1 : (8 << IsQuad));
Bob Wilson98bc98c2011-11-08 01:16:11 +00004232 case NeonTypeFlags::Int16:
4233 case NeonTypeFlags::Poly16:
Sjoerd Meijer98ee7852017-07-06 16:37:31 +00004234 return llvm::VectorType::get(CGF->Int16Ty, V1Ty ? 1 : (4 << IsQuad));
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004235 case NeonTypeFlags::Float16:
Sjoerd Meijer87793e72018-03-19 13:22:49 +00004236 if (HasLegalHalfType)
Sjoerd Meijer95da8752018-03-13 19:38:56 +00004237 return llvm::VectorType::get(CGF->HalfTy, V1Ty ? 1 : (4 << IsQuad));
4238 else
4239 return llvm::VectorType::get(CGF->Int16Ty, V1Ty ? 1 : (4 << IsQuad));
Bob Wilson98bc98c2011-11-08 01:16:11 +00004240 case NeonTypeFlags::Int32:
Jiangning Liu036f16d2013-09-24 02:48:06 +00004241 return llvm::VectorType::get(CGF->Int32Ty, V1Ty ? 1 : (2 << IsQuad));
Bob Wilson98bc98c2011-11-08 01:16:11 +00004242 case NeonTypeFlags::Int64:
Kevin Qincaac85e2013-11-14 03:29:16 +00004243 case NeonTypeFlags::Poly64:
Jiangning Liu036f16d2013-09-24 02:48:06 +00004244 return llvm::VectorType::get(CGF->Int64Ty, V1Ty ? 1 : (1 << IsQuad));
Kevin Qinfb79d7f2013-12-10 06:49:01 +00004245 case NeonTypeFlags::Poly128:
4246    // FIXME: i128 and f128 are not yet fully supported in Clang and LLVM,
4247    // and much of the i128/f128 API is still missing, so use v16i8 to
4248    // represent poly128 and let it get pattern matched.
4249 return llvm::VectorType::get(CGF->Int8Ty, 16);
Bob Wilson98bc98c2011-11-08 01:16:11 +00004250 case NeonTypeFlags::Float32:
Jiangning Liu036f16d2013-09-24 02:48:06 +00004251 return llvm::VectorType::get(CGF->FloatTy, V1Ty ? 1 : (2 << IsQuad));
Tim Northover2fe823a2013-08-01 09:23:19 +00004252 case NeonTypeFlags::Float64:
Jiangning Liu036f16d2013-09-24 02:48:06 +00004253 return llvm::VectorType::get(CGF->DoubleTy, V1Ty ? 1 : (1 << IsQuad));
David Blaikief47fa302012-01-17 02:30:50 +00004254 }
Benjamin Kramer9b1dfe82013-09-26 16:36:08 +00004255 llvm_unreachable("Unknown vector element type!");
Nate Begeman5968eb22010-06-07 16:01:56 +00004256}
4257
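// Map an integer NEON type flag to the floating-point vector type of the same
// total width, e.g. Int32 becomes <2 x float> (or <4 x float> with the quad
// bit set); used where a conversion builtin's flags describe the integer side.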
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00004258static llvm::VectorType *GetFloatNeonType(CodeGenFunction *CGF,
4259 NeonTypeFlags IntTypeFlags) {
4260 int IsQuad = IntTypeFlags.isQuad();
4261 switch (IntTypeFlags.getEltType()) {
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004262 case NeonTypeFlags::Int16:
4263 return llvm::VectorType::get(CGF->HalfTy, (4 << IsQuad));
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00004264 case NeonTypeFlags::Int32:
4265 return llvm::VectorType::get(CGF->FloatTy, (2 << IsQuad));
4266 case NeonTypeFlags::Int64:
4267 return llvm::VectorType::get(CGF->DoubleTy, (1 << IsQuad));
4268 default:
4269 llvm_unreachable("Type can't be converted to floating-point!");
4270 }
4271}
4272
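// Broadcast lane C of V to every lane by shuffling V against itself with a
// mask that is simply C repeated once per lane; e.g. C == 1 on a <4 x i32>
// input uses the mask <1, 1, 1, 1>.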
Bob Wilson210f6dd2010-12-07 22:40:02 +00004273Value *CodeGenFunction::EmitNeonSplat(Value *V, Constant *C) {
Craig Topperf2f1a092016-07-08 02:17:35 +00004274 unsigned nElts = V->getType()->getVectorNumElements();
Chris Lattner2d6b7b92012-01-25 05:34:41 +00004275 Value* SV = llvm::ConstantVector::getSplat(nElts, C);
Nate Begeman4a04b462010-06-10 00:17:56 +00004276 return Builder.CreateShuffleVector(V, V, SV, "lane");
4277}
4278
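// Emit a call to a NEON intrinsic, bitcasting each operand to the parameter
// type the intrinsic declaration expects. If 'shift' is nonzero, the operand
// in that position is instead turned into a constant shift-amount vector,
// negated when 'rightshift' is set.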
Nate Begemanae6b1d82010-06-08 06:03:01 +00004279Value *CodeGenFunction::EmitNeonCall(Function *F, SmallVectorImpl<Value*> &Ops,
Bob Wilson482afae2010-12-08 22:37:56 +00004280 const char *name,
Nate Begeman91e1fea2010-06-14 05:21:25 +00004281 unsigned shift, bool rightshift) {
Nate Begemanae6b1d82010-06-08 06:03:01 +00004282 unsigned j = 0;
4283 for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
4284 ai != ae; ++ai, ++j)
Nate Begeman91e1fea2010-06-14 05:21:25 +00004285 if (shift > 0 && shift == j)
4286 Ops[j] = EmitNeonShiftVector(Ops[j], ai->getType(), rightshift);
4287 else
4288 Ops[j] = Builder.CreateBitCast(Ops[j], ai->getType(), name);
Nate Begemanae6b1d82010-06-08 06:03:01 +00004289
Jay Foad5bd375a2011-07-15 08:37:34 +00004290 return Builder.CreateCall(F, Ops, name);
Nate Begemanae6b1d82010-06-08 06:03:01 +00004291}
4292
Jim Grosbachd3608f42012-09-21 00:18:27 +00004293Value *CodeGenFunction::EmitNeonShiftVector(Value *V, llvm::Type *Ty,
Nate Begeman8ed060b2010-06-11 22:57:12 +00004294 bool neg) {
Chris Lattner2d6b7b92012-01-25 05:34:41 +00004295 int SV = cast<ConstantInt>(V)->getSExtValue();
Benjamin Kramerc385a802015-07-28 15:40:11 +00004296 return ConstantInt::get(Ty, neg ? -SV : SV);
Nate Begeman8ed060b2010-06-11 22:57:12 +00004297}
4298
Adrian Prantl9fc8faf2018-05-09 01:00:01 +00004299// Right-shift a vector by a constant.
Amaury de la Vieuville21bf6ed2013-10-04 13:13:15 +00004300Value *CodeGenFunction::EmitNeonRShiftImm(Value *Vec, Value *Shift,
4301 llvm::Type *Ty, bool usgn,
4302 const char *name) {
4303 llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);
4304
4305 int ShiftAmt = cast<ConstantInt>(Shift)->getSExtValue();
4306 int EltSize = VTy->getScalarSizeInBits();
4307
4308 Vec = Builder.CreateBitCast(Vec, Ty);
4309
4310 // lshr/ashr are undefined when the shift amount is equal to the vector
4311 // element size.
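  // For example, a signed shift right by 32 on a <2 x i32> vector is emitted
  // as a shift by 31, while the equivalent unsigned shift folds to zero.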
4312 if (ShiftAmt == EltSize) {
4313 if (usgn) {
4314 // Right-shifting an unsigned value by its size yields 0.
Benjamin Kramerc385a802015-07-28 15:40:11 +00004315 return llvm::ConstantAggregateZero::get(VTy);
Amaury de la Vieuville21bf6ed2013-10-04 13:13:15 +00004316 } else {
4317 // Right-shifting a signed value by its size is equivalent
4318 // to a shift of size-1.
4319 --ShiftAmt;
4320 Shift = ConstantInt::get(VTy->getElementType(), ShiftAmt);
4321 }
4322 }
4323
4324 Shift = EmitNeonShiftVector(Shift, Ty, false);
4325 if (usgn)
4326 return Builder.CreateLShr(Vec, Shift, name);
4327 else
4328 return Builder.CreateAShr(Vec, Shift, name);
4329}
4330
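// Type-modifier flags for the NeonIntrinsicInfo tables below. They are OR'd
// together per table entry and tell LookupNeonLLVMIntrinsic which types to
// use when overloading the LLVM intrinsic; for instance, VectorRetGetArgs01
// overloads on a vectorized return type plus two copies of the (vectorized)
// argument type.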
Tim Northover2d837962014-02-21 11:57:20 +00004331enum {
4332 AddRetType = (1 << 0),
4333 Add1ArgType = (1 << 1),
4334 Add2ArgTypes = (1 << 2),
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004335
Tim Northover2d837962014-02-21 11:57:20 +00004336 VectorizeRetType = (1 << 3),
4337 VectorizeArgTypes = (1 << 4),
4338
4339 InventFloatType = (1 << 5),
Tim Northover8fe03d62014-02-21 11:57:24 +00004340 UnsignedAlts = (1 << 6),
Tim Northover2d837962014-02-21 11:57:20 +00004341
Tim Northovera2ee4332014-03-29 15:09:45 +00004342 Use64BitVectors = (1 << 7),
4343 Use128BitVectors = (1 << 8),
4344
Tim Northover2d837962014-02-21 11:57:20 +00004345 Vectorize1ArgType = Add1ArgType | VectorizeArgTypes,
4346 VectorRet = AddRetType | VectorizeRetType,
4347 VectorRetGetArgs01 =
4348 AddRetType | Add2ArgTypes | VectorizeRetType | VectorizeArgTypes,
4349 FpCmpzModifiers =
Tim Northovera0c95eb2014-02-21 12:16:59 +00004350 AddRetType | VectorizeRetType | Add1ArgType | InventFloatType
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004351};
4352
Benjamin Kramere003ca22015-10-28 13:54:16 +00004353namespace {
4354struct NeonIntrinsicInfo {
Ben Craigcd7e9f12015-12-14 21:54:11 +00004355 const char *NameHint;
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004356 unsigned BuiltinID;
4357 unsigned LLVMIntrinsic;
Tim Northover8fe03d62014-02-21 11:57:24 +00004358 unsigned AltLLVMIntrinsic;
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004359 unsigned TypeModifier;
4360
4361 bool operator<(unsigned RHSBuiltinID) const {
4362 return BuiltinID < RHSBuiltinID;
4363 }
Eric Christophered60b432015-11-11 02:04:08 +00004364 bool operator<(const NeonIntrinsicInfo &TE) const {
4365 return BuiltinID < TE.BuiltinID;
4366 }
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004367};
Benjamin Kramere003ca22015-10-28 13:54:16 +00004368} // end anonymous namespace
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004369
Tim Northover8fe03d62014-02-21 11:57:24 +00004370#define NEONMAP0(NameBase) \
Ben Craigcd7e9f12015-12-14 21:54:11 +00004371 { #NameBase, NEON::BI__builtin_neon_ ## NameBase, 0, 0, 0 }
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004372
Tim Northover8fe03d62014-02-21 11:57:24 +00004373#define NEONMAP1(NameBase, LLVMIntrinsic, TypeModifier) \
Ben Craigcd7e9f12015-12-14 21:54:11 +00004374  { #NameBase, NEON::BI__builtin_neon_ ## NameBase, \
4375 Intrinsic::LLVMIntrinsic, 0, TypeModifier }
Tim Northoverdb3e5e22014-02-19 11:55:06 +00004376
Tim Northover8fe03d62014-02-21 11:57:24 +00004377#define NEONMAP2(NameBase, LLVMIntrinsic, AltLLVMIntrinsic, TypeModifier) \
Ben Craigcd7e9f12015-12-14 21:54:11 +00004378  { #NameBase, NEON::BI__builtin_neon_ ## NameBase, \
Tim Northover8fe03d62014-02-21 11:57:24 +00004379 Intrinsic::LLVMIntrinsic, Intrinsic::AltLLVMIntrinsic, \
Ben Craigcd7e9f12015-12-14 21:54:11 +00004380 TypeModifier }
Tim Northover8fe03d62014-02-21 11:57:24 +00004381
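// Mapping tables from NEON builtins to LLVM intrinsics. Each table must stay
// sorted by builtin ID, since findNeonIntrinsicInMap below binary-searches it
// (and asserts sortedness in +Asserts builds). As an illustration,
// NEONMAP1(vabs_v, arm_neon_vabs, 0) expands to the entry
// { "vabs_v", NEON::BI__builtin_neon_vabs_v, Intrinsic::arm_neon_vabs, 0, 0 },
// i.e. no alternate intrinsic and no type-modifier flags.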
Craig Topper273dbc62015-10-18 05:29:26 +00004382static const NeonIntrinsicInfo ARMSIMDIntrinsicMap[] = {
Tim Northover8fe03d62014-02-21 11:57:24 +00004383 NEONMAP2(vabd_v, arm_neon_vabdu, arm_neon_vabds, Add1ArgType | UnsignedAlts),
4384 NEONMAP2(vabdq_v, arm_neon_vabdu, arm_neon_vabds, Add1ArgType | UnsignedAlts),
4385 NEONMAP1(vabs_v, arm_neon_vabs, 0),
4386 NEONMAP1(vabsq_v, arm_neon_vabs, 0),
4387 NEONMAP0(vaddhn_v),
4388 NEONMAP1(vaesdq_v, arm_neon_aesd, 0),
4389 NEONMAP1(vaeseq_v, arm_neon_aese, 0),
4390 NEONMAP1(vaesimcq_v, arm_neon_aesimc, 0),
4391 NEONMAP1(vaesmcq_v, arm_neon_aesmc, 0),
4392 NEONMAP1(vbsl_v, arm_neon_vbsl, AddRetType),
4393 NEONMAP1(vbslq_v, arm_neon_vbsl, AddRetType),
4394 NEONMAP1(vcage_v, arm_neon_vacge, 0),
4395 NEONMAP1(vcageq_v, arm_neon_vacge, 0),
4396 NEONMAP1(vcagt_v, arm_neon_vacgt, 0),
4397 NEONMAP1(vcagtq_v, arm_neon_vacgt, 0),
4398 NEONMAP1(vcale_v, arm_neon_vacge, 0),
4399 NEONMAP1(vcaleq_v, arm_neon_vacge, 0),
4400 NEONMAP1(vcalt_v, arm_neon_vacgt, 0),
4401 NEONMAP1(vcaltq_v, arm_neon_vacgt, 0),
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00004402 NEONMAP0(vceqz_v),
4403 NEONMAP0(vceqzq_v),
4404 NEONMAP0(vcgez_v),
4405 NEONMAP0(vcgezq_v),
4406 NEONMAP0(vcgtz_v),
4407 NEONMAP0(vcgtzq_v),
4408 NEONMAP0(vclez_v),
4409 NEONMAP0(vclezq_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004410 NEONMAP1(vcls_v, arm_neon_vcls, Add1ArgType),
4411 NEONMAP1(vclsq_v, arm_neon_vcls, Add1ArgType),
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00004412 NEONMAP0(vcltz_v),
4413 NEONMAP0(vcltzq_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004414 NEONMAP1(vclz_v, ctlz, Add1ArgType),
4415 NEONMAP1(vclzq_v, ctlz, Add1ArgType),
4416 NEONMAP1(vcnt_v, ctpop, Add1ArgType),
4417 NEONMAP1(vcntq_v, ctpop, Add1ArgType),
Ahmed Bougachacd5b8a02015-08-21 23:34:20 +00004418 NEONMAP1(vcvt_f16_f32, arm_neon_vcvtfp2hf, 0),
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00004419 NEONMAP0(vcvt_f16_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004420 NEONMAP1(vcvt_f32_f16, arm_neon_vcvthf2fp, 0),
4421 NEONMAP0(vcvt_f32_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004422 NEONMAP2(vcvt_n_f16_v, arm_neon_vcvtfxu2fp, arm_neon_vcvtfxs2fp, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004423 NEONMAP2(vcvt_n_f32_v, arm_neon_vcvtfxu2fp, arm_neon_vcvtfxs2fp, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004424 NEONMAP1(vcvt_n_s16_v, arm_neon_vcvtfp2fxs, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004425 NEONMAP1(vcvt_n_s32_v, arm_neon_vcvtfp2fxs, 0),
4426 NEONMAP1(vcvt_n_s64_v, arm_neon_vcvtfp2fxs, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004427 NEONMAP1(vcvt_n_u16_v, arm_neon_vcvtfp2fxu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004428 NEONMAP1(vcvt_n_u32_v, arm_neon_vcvtfp2fxu, 0),
4429 NEONMAP1(vcvt_n_u64_v, arm_neon_vcvtfp2fxu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004430 NEONMAP0(vcvt_s16_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004431 NEONMAP0(vcvt_s32_v),
4432 NEONMAP0(vcvt_s64_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004433 NEONMAP0(vcvt_u16_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004434 NEONMAP0(vcvt_u32_v),
4435 NEONMAP0(vcvt_u64_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004436 NEONMAP1(vcvta_s16_v, arm_neon_vcvtas, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004437 NEONMAP1(vcvta_s32_v, arm_neon_vcvtas, 0),
4438 NEONMAP1(vcvta_s64_v, arm_neon_vcvtas, 0),
Luke Geesonda2b2e82018-06-15 10:10:45 +00004439 NEONMAP1(vcvta_u16_v, arm_neon_vcvtau, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004440 NEONMAP1(vcvta_u32_v, arm_neon_vcvtau, 0),
4441 NEONMAP1(vcvta_u64_v, arm_neon_vcvtau, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004442 NEONMAP1(vcvtaq_s16_v, arm_neon_vcvtas, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004443 NEONMAP1(vcvtaq_s32_v, arm_neon_vcvtas, 0),
4444 NEONMAP1(vcvtaq_s64_v, arm_neon_vcvtas, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004445 NEONMAP1(vcvtaq_u16_v, arm_neon_vcvtau, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004446 NEONMAP1(vcvtaq_u32_v, arm_neon_vcvtau, 0),
4447 NEONMAP1(vcvtaq_u64_v, arm_neon_vcvtau, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004448 NEONMAP1(vcvtm_s16_v, arm_neon_vcvtms, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004449 NEONMAP1(vcvtm_s32_v, arm_neon_vcvtms, 0),
4450 NEONMAP1(vcvtm_s64_v, arm_neon_vcvtms, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004451 NEONMAP1(vcvtm_u16_v, arm_neon_vcvtmu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004452 NEONMAP1(vcvtm_u32_v, arm_neon_vcvtmu, 0),
4453 NEONMAP1(vcvtm_u64_v, arm_neon_vcvtmu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004454 NEONMAP1(vcvtmq_s16_v, arm_neon_vcvtms, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004455 NEONMAP1(vcvtmq_s32_v, arm_neon_vcvtms, 0),
4456 NEONMAP1(vcvtmq_s64_v, arm_neon_vcvtms, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004457 NEONMAP1(vcvtmq_u16_v, arm_neon_vcvtmu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004458 NEONMAP1(vcvtmq_u32_v, arm_neon_vcvtmu, 0),
4459 NEONMAP1(vcvtmq_u64_v, arm_neon_vcvtmu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004460 NEONMAP1(vcvtn_s16_v, arm_neon_vcvtns, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004461 NEONMAP1(vcvtn_s32_v, arm_neon_vcvtns, 0),
4462 NEONMAP1(vcvtn_s64_v, arm_neon_vcvtns, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004463 NEONMAP1(vcvtn_u16_v, arm_neon_vcvtnu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004464 NEONMAP1(vcvtn_u32_v, arm_neon_vcvtnu, 0),
4465 NEONMAP1(vcvtn_u64_v, arm_neon_vcvtnu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004466 NEONMAP1(vcvtnq_s16_v, arm_neon_vcvtns, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004467 NEONMAP1(vcvtnq_s32_v, arm_neon_vcvtns, 0),
4468 NEONMAP1(vcvtnq_s64_v, arm_neon_vcvtns, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004469 NEONMAP1(vcvtnq_u16_v, arm_neon_vcvtnu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004470 NEONMAP1(vcvtnq_u32_v, arm_neon_vcvtnu, 0),
4471 NEONMAP1(vcvtnq_u64_v, arm_neon_vcvtnu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004472 NEONMAP1(vcvtp_s16_v, arm_neon_vcvtps, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004473 NEONMAP1(vcvtp_s32_v, arm_neon_vcvtps, 0),
4474 NEONMAP1(vcvtp_s64_v, arm_neon_vcvtps, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004475 NEONMAP1(vcvtp_u16_v, arm_neon_vcvtpu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004476 NEONMAP1(vcvtp_u32_v, arm_neon_vcvtpu, 0),
4477 NEONMAP1(vcvtp_u64_v, arm_neon_vcvtpu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004478 NEONMAP1(vcvtpq_s16_v, arm_neon_vcvtps, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004479 NEONMAP1(vcvtpq_s32_v, arm_neon_vcvtps, 0),
4480 NEONMAP1(vcvtpq_s64_v, arm_neon_vcvtps, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004481 NEONMAP1(vcvtpq_u16_v, arm_neon_vcvtpu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004482 NEONMAP1(vcvtpq_u32_v, arm_neon_vcvtpu, 0),
4483 NEONMAP1(vcvtpq_u64_v, arm_neon_vcvtpu, 0),
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00004484 NEONMAP0(vcvtq_f16_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004485 NEONMAP0(vcvtq_f32_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004486 NEONMAP2(vcvtq_n_f16_v, arm_neon_vcvtfxu2fp, arm_neon_vcvtfxs2fp, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004487 NEONMAP2(vcvtq_n_f32_v, arm_neon_vcvtfxu2fp, arm_neon_vcvtfxs2fp, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004488 NEONMAP1(vcvtq_n_s16_v, arm_neon_vcvtfp2fxs, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004489 NEONMAP1(vcvtq_n_s32_v, arm_neon_vcvtfp2fxs, 0),
4490 NEONMAP1(vcvtq_n_s64_v, arm_neon_vcvtfp2fxs, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004491 NEONMAP1(vcvtq_n_u16_v, arm_neon_vcvtfp2fxu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004492 NEONMAP1(vcvtq_n_u32_v, arm_neon_vcvtfp2fxu, 0),
4493 NEONMAP1(vcvtq_n_u64_v, arm_neon_vcvtfp2fxu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004494 NEONMAP0(vcvtq_s16_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004495 NEONMAP0(vcvtq_s32_v),
4496 NEONMAP0(vcvtq_s64_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004497 NEONMAP0(vcvtq_u16_v),
Tim Northover8fe03d62014-02-21 11:57:24 +00004498 NEONMAP0(vcvtq_u32_v),
4499 NEONMAP0(vcvtq_u64_v),
Oliver Stannard2fcee8b2018-04-27 14:03:32 +00004500 NEONMAP2(vdot_v, arm_neon_udot, arm_neon_sdot, 0),
4501 NEONMAP2(vdotq_v, arm_neon_udot, arm_neon_sdot, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004502 NEONMAP0(vext_v),
4503 NEONMAP0(vextq_v),
4504 NEONMAP0(vfma_v),
4505 NEONMAP0(vfmaq_v),
4506 NEONMAP2(vhadd_v, arm_neon_vhaddu, arm_neon_vhadds, Add1ArgType | UnsignedAlts),
4507 NEONMAP2(vhaddq_v, arm_neon_vhaddu, arm_neon_vhadds, Add1ArgType | UnsignedAlts),
4508 NEONMAP2(vhsub_v, arm_neon_vhsubu, arm_neon_vhsubs, Add1ArgType | UnsignedAlts),
4509 NEONMAP2(vhsubq_v, arm_neon_vhsubu, arm_neon_vhsubs, Add1ArgType | UnsignedAlts),
4510 NEONMAP0(vld1_dup_v),
4511 NEONMAP1(vld1_v, arm_neon_vld1, 0),
Ivan A. Kosarev9c40c0a2018-06-02 17:42:59 +00004512 NEONMAP1(vld1_x2_v, arm_neon_vld1x2, 0),
4513 NEONMAP1(vld1_x3_v, arm_neon_vld1x3, 0),
4514 NEONMAP1(vld1_x4_v, arm_neon_vld1x4, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004515 NEONMAP0(vld1q_dup_v),
4516 NEONMAP1(vld1q_v, arm_neon_vld1, 0),
Ivan A. Kosarev9c40c0a2018-06-02 17:42:59 +00004517 NEONMAP1(vld1q_x2_v, arm_neon_vld1x2, 0),
4518 NEONMAP1(vld1q_x3_v, arm_neon_vld1x3, 0),
4519 NEONMAP1(vld1q_x4_v, arm_neon_vld1x4, 0),
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00004520 NEONMAP1(vld2_dup_v, arm_neon_vld2dup, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004521 NEONMAP1(vld2_lane_v, arm_neon_vld2lane, 0),
4522 NEONMAP1(vld2_v, arm_neon_vld2, 0),
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00004523 NEONMAP1(vld2q_dup_v, arm_neon_vld2dup, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004524 NEONMAP1(vld2q_lane_v, arm_neon_vld2lane, 0),
4525 NEONMAP1(vld2q_v, arm_neon_vld2, 0),
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00004526 NEONMAP1(vld3_dup_v, arm_neon_vld3dup, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004527 NEONMAP1(vld3_lane_v, arm_neon_vld3lane, 0),
4528 NEONMAP1(vld3_v, arm_neon_vld3, 0),
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00004529 NEONMAP1(vld3q_dup_v, arm_neon_vld3dup, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004530 NEONMAP1(vld3q_lane_v, arm_neon_vld3lane, 0),
4531 NEONMAP1(vld3q_v, arm_neon_vld3, 0),
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00004532 NEONMAP1(vld4_dup_v, arm_neon_vld4dup, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004533 NEONMAP1(vld4_lane_v, arm_neon_vld4lane, 0),
4534 NEONMAP1(vld4_v, arm_neon_vld4, 0),
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00004535 NEONMAP1(vld4q_dup_v, arm_neon_vld4dup, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004536 NEONMAP1(vld4q_lane_v, arm_neon_vld4lane, 0),
4537 NEONMAP1(vld4q_v, arm_neon_vld4, 0),
4538 NEONMAP2(vmax_v, arm_neon_vmaxu, arm_neon_vmaxs, Add1ArgType | UnsignedAlts),
James Molloy163b1ba2014-09-05 13:50:34 +00004539 NEONMAP1(vmaxnm_v, arm_neon_vmaxnm, Add1ArgType),
4540 NEONMAP1(vmaxnmq_v, arm_neon_vmaxnm, Add1ArgType),
Tim Northover8fe03d62014-02-21 11:57:24 +00004541 NEONMAP2(vmaxq_v, arm_neon_vmaxu, arm_neon_vmaxs, Add1ArgType | UnsignedAlts),
4542 NEONMAP2(vmin_v, arm_neon_vminu, arm_neon_vmins, Add1ArgType | UnsignedAlts),
James Molloy163b1ba2014-09-05 13:50:34 +00004543 NEONMAP1(vminnm_v, arm_neon_vminnm, Add1ArgType),
4544 NEONMAP1(vminnmq_v, arm_neon_vminnm, Add1ArgType),
Tim Northover8fe03d62014-02-21 11:57:24 +00004545 NEONMAP2(vminq_v, arm_neon_vminu, arm_neon_vmins, Add1ArgType | UnsignedAlts),
4546 NEONMAP0(vmovl_v),
4547 NEONMAP0(vmovn_v),
4548 NEONMAP1(vmul_v, arm_neon_vmulp, Add1ArgType),
4549 NEONMAP0(vmull_v),
4550 NEONMAP1(vmulq_v, arm_neon_vmulp, Add1ArgType),
4551 NEONMAP2(vpadal_v, arm_neon_vpadalu, arm_neon_vpadals, UnsignedAlts),
4552 NEONMAP2(vpadalq_v, arm_neon_vpadalu, arm_neon_vpadals, UnsignedAlts),
4553 NEONMAP1(vpadd_v, arm_neon_vpadd, Add1ArgType),
4554 NEONMAP2(vpaddl_v, arm_neon_vpaddlu, arm_neon_vpaddls, UnsignedAlts),
4555 NEONMAP2(vpaddlq_v, arm_neon_vpaddlu, arm_neon_vpaddls, UnsignedAlts),
4556 NEONMAP1(vpaddq_v, arm_neon_vpadd, Add1ArgType),
4557 NEONMAP2(vpmax_v, arm_neon_vpmaxu, arm_neon_vpmaxs, Add1ArgType | UnsignedAlts),
4558 NEONMAP2(vpmin_v, arm_neon_vpminu, arm_neon_vpmins, Add1ArgType | UnsignedAlts),
4559 NEONMAP1(vqabs_v, arm_neon_vqabs, Add1ArgType),
4560 NEONMAP1(vqabsq_v, arm_neon_vqabs, Add1ArgType),
4561 NEONMAP2(vqadd_v, arm_neon_vqaddu, arm_neon_vqadds, Add1ArgType | UnsignedAlts),
4562 NEONMAP2(vqaddq_v, arm_neon_vqaddu, arm_neon_vqadds, Add1ArgType | UnsignedAlts),
4563 NEONMAP2(vqdmlal_v, arm_neon_vqdmull, arm_neon_vqadds, 0),
4564 NEONMAP2(vqdmlsl_v, arm_neon_vqdmull, arm_neon_vqsubs, 0),
4565 NEONMAP1(vqdmulh_v, arm_neon_vqdmulh, Add1ArgType),
4566 NEONMAP1(vqdmulhq_v, arm_neon_vqdmulh, Add1ArgType),
4567 NEONMAP1(vqdmull_v, arm_neon_vqdmull, Add1ArgType),
4568 NEONMAP2(vqmovn_v, arm_neon_vqmovnu, arm_neon_vqmovns, Add1ArgType | UnsignedAlts),
4569 NEONMAP1(vqmovun_v, arm_neon_vqmovnsu, Add1ArgType),
4570 NEONMAP1(vqneg_v, arm_neon_vqneg, Add1ArgType),
4571 NEONMAP1(vqnegq_v, arm_neon_vqneg, Add1ArgType),
4572 NEONMAP1(vqrdmulh_v, arm_neon_vqrdmulh, Add1ArgType),
4573 NEONMAP1(vqrdmulhq_v, arm_neon_vqrdmulh, Add1ArgType),
4574 NEONMAP2(vqrshl_v, arm_neon_vqrshiftu, arm_neon_vqrshifts, Add1ArgType | UnsignedAlts),
4575 NEONMAP2(vqrshlq_v, arm_neon_vqrshiftu, arm_neon_vqrshifts, Add1ArgType | UnsignedAlts),
4576 NEONMAP2(vqshl_n_v, arm_neon_vqshiftu, arm_neon_vqshifts, UnsignedAlts),
4577 NEONMAP2(vqshl_v, arm_neon_vqshiftu, arm_neon_vqshifts, Add1ArgType | UnsignedAlts),
4578 NEONMAP2(vqshlq_n_v, arm_neon_vqshiftu, arm_neon_vqshifts, UnsignedAlts),
4579 NEONMAP2(vqshlq_v, arm_neon_vqshiftu, arm_neon_vqshifts, Add1ArgType | UnsignedAlts),
Yi Kong1083eb52014-07-29 09:25:17 +00004580 NEONMAP1(vqshlu_n_v, arm_neon_vqshiftsu, 0),
4581 NEONMAP1(vqshluq_n_v, arm_neon_vqshiftsu, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004582 NEONMAP2(vqsub_v, arm_neon_vqsubu, arm_neon_vqsubs, Add1ArgType | UnsignedAlts),
4583 NEONMAP2(vqsubq_v, arm_neon_vqsubu, arm_neon_vqsubs, Add1ArgType | UnsignedAlts),
4584 NEONMAP1(vraddhn_v, arm_neon_vraddhn, Add1ArgType),
4585 NEONMAP2(vrecpe_v, arm_neon_vrecpe, arm_neon_vrecpe, 0),
4586 NEONMAP2(vrecpeq_v, arm_neon_vrecpe, arm_neon_vrecpe, 0),
4587 NEONMAP1(vrecps_v, arm_neon_vrecps, Add1ArgType),
4588 NEONMAP1(vrecpsq_v, arm_neon_vrecps, Add1ArgType),
4589 NEONMAP2(vrhadd_v, arm_neon_vrhaddu, arm_neon_vrhadds, Add1ArgType | UnsignedAlts),
4590 NEONMAP2(vrhaddq_v, arm_neon_vrhaddu, arm_neon_vrhadds, Add1ArgType | UnsignedAlts),
James Molloy163b1ba2014-09-05 13:50:34 +00004591 NEONMAP1(vrnd_v, arm_neon_vrintz, Add1ArgType),
4592 NEONMAP1(vrnda_v, arm_neon_vrinta, Add1ArgType),
4593 NEONMAP1(vrndaq_v, arm_neon_vrinta, Add1ArgType),
Ivan A. Kosarev8264bb82018-07-23 13:26:37 +00004594 NEONMAP0(vrndi_v),
4595 NEONMAP0(vrndiq_v),
James Molloy163b1ba2014-09-05 13:50:34 +00004596 NEONMAP1(vrndm_v, arm_neon_vrintm, Add1ArgType),
4597 NEONMAP1(vrndmq_v, arm_neon_vrintm, Add1ArgType),
4598 NEONMAP1(vrndn_v, arm_neon_vrintn, Add1ArgType),
4599 NEONMAP1(vrndnq_v, arm_neon_vrintn, Add1ArgType),
4600 NEONMAP1(vrndp_v, arm_neon_vrintp, Add1ArgType),
4601 NEONMAP1(vrndpq_v, arm_neon_vrintp, Add1ArgType),
4602 NEONMAP1(vrndq_v, arm_neon_vrintz, Add1ArgType),
4603 NEONMAP1(vrndx_v, arm_neon_vrintx, Add1ArgType),
4604 NEONMAP1(vrndxq_v, arm_neon_vrintx, Add1ArgType),
Tim Northover8fe03d62014-02-21 11:57:24 +00004605 NEONMAP2(vrshl_v, arm_neon_vrshiftu, arm_neon_vrshifts, Add1ArgType | UnsignedAlts),
4606 NEONMAP2(vrshlq_v, arm_neon_vrshiftu, arm_neon_vrshifts, Add1ArgType | UnsignedAlts),
Yi Kong1083eb52014-07-29 09:25:17 +00004607 NEONMAP2(vrshr_n_v, arm_neon_vrshiftu, arm_neon_vrshifts, UnsignedAlts),
4608 NEONMAP2(vrshrq_n_v, arm_neon_vrshiftu, arm_neon_vrshifts, UnsignedAlts),
Tim Northover8fe03d62014-02-21 11:57:24 +00004609 NEONMAP2(vrsqrte_v, arm_neon_vrsqrte, arm_neon_vrsqrte, 0),
4610 NEONMAP2(vrsqrteq_v, arm_neon_vrsqrte, arm_neon_vrsqrte, 0),
4611 NEONMAP1(vrsqrts_v, arm_neon_vrsqrts, Add1ArgType),
4612 NEONMAP1(vrsqrtsq_v, arm_neon_vrsqrts, Add1ArgType),
4613 NEONMAP1(vrsubhn_v, arm_neon_vrsubhn, Add1ArgType),
4614 NEONMAP1(vsha1su0q_v, arm_neon_sha1su0, 0),
4615 NEONMAP1(vsha1su1q_v, arm_neon_sha1su1, 0),
4616 NEONMAP1(vsha256h2q_v, arm_neon_sha256h2, 0),
4617 NEONMAP1(vsha256hq_v, arm_neon_sha256h, 0),
4618 NEONMAP1(vsha256su0q_v, arm_neon_sha256su0, 0),
4619 NEONMAP1(vsha256su1q_v, arm_neon_sha256su1, 0),
4620 NEONMAP0(vshl_n_v),
4621 NEONMAP2(vshl_v, arm_neon_vshiftu, arm_neon_vshifts, Add1ArgType | UnsignedAlts),
4622 NEONMAP0(vshll_n_v),
4623 NEONMAP0(vshlq_n_v),
4624 NEONMAP2(vshlq_v, arm_neon_vshiftu, arm_neon_vshifts, Add1ArgType | UnsignedAlts),
4625 NEONMAP0(vshr_n_v),
4626 NEONMAP0(vshrn_n_v),
4627 NEONMAP0(vshrq_n_v),
4628 NEONMAP1(vst1_v, arm_neon_vst1, 0),
Ivan A. Kosarev73c76c32018-06-10 09:28:10 +00004629 NEONMAP1(vst1_x2_v, arm_neon_vst1x2, 0),
4630 NEONMAP1(vst1_x3_v, arm_neon_vst1x3, 0),
4631 NEONMAP1(vst1_x4_v, arm_neon_vst1x4, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004632 NEONMAP1(vst1q_v, arm_neon_vst1, 0),
Ivan A. Kosarev73c76c32018-06-10 09:28:10 +00004633 NEONMAP1(vst1q_x2_v, arm_neon_vst1x2, 0),
4634 NEONMAP1(vst1q_x3_v, arm_neon_vst1x3, 0),
4635 NEONMAP1(vst1q_x4_v, arm_neon_vst1x4, 0),
Tim Northover8fe03d62014-02-21 11:57:24 +00004636 NEONMAP1(vst2_lane_v, arm_neon_vst2lane, 0),
4637 NEONMAP1(vst2_v, arm_neon_vst2, 0),
4638 NEONMAP1(vst2q_lane_v, arm_neon_vst2lane, 0),
4639 NEONMAP1(vst2q_v, arm_neon_vst2, 0),
4640 NEONMAP1(vst3_lane_v, arm_neon_vst3lane, 0),
4641 NEONMAP1(vst3_v, arm_neon_vst3, 0),
4642 NEONMAP1(vst3q_lane_v, arm_neon_vst3lane, 0),
4643 NEONMAP1(vst3q_v, arm_neon_vst3, 0),
4644 NEONMAP1(vst4_lane_v, arm_neon_vst4lane, 0),
4645 NEONMAP1(vst4_v, arm_neon_vst4, 0),
4646 NEONMAP1(vst4q_lane_v, arm_neon_vst4lane, 0),
4647 NEONMAP1(vst4q_v, arm_neon_vst4, 0),
4648 NEONMAP0(vsubhn_v),
4649 NEONMAP0(vtrn_v),
4650 NEONMAP0(vtrnq_v),
4651 NEONMAP0(vtst_v),
4652 NEONMAP0(vtstq_v),
4653 NEONMAP0(vuzp_v),
4654 NEONMAP0(vuzpq_v),
4655 NEONMAP0(vzip_v),
Tim Northovera0c95eb2014-02-21 12:16:59 +00004656 NEONMAP0(vzipq_v)
Tim Northover8fe03d62014-02-21 11:57:24 +00004657};
4658
Craig Topper273dbc62015-10-18 05:29:26 +00004659static const NeonIntrinsicInfo AArch64SIMDIntrinsicMap[] = {
Tim Northover573cbee2014-05-24 12:52:07 +00004660 NEONMAP1(vabs_v, aarch64_neon_abs, 0),
4661 NEONMAP1(vabsq_v, aarch64_neon_abs, 0),
Tim Northovera2ee4332014-03-29 15:09:45 +00004662 NEONMAP0(vaddhn_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004663 NEONMAP1(vaesdq_v, aarch64_crypto_aesd, 0),
4664 NEONMAP1(vaeseq_v, aarch64_crypto_aese, 0),
4665 NEONMAP1(vaesimcq_v, aarch64_crypto_aesimc, 0),
4666 NEONMAP1(vaesmcq_v, aarch64_crypto_aesmc, 0),
4667 NEONMAP1(vcage_v, aarch64_neon_facge, 0),
4668 NEONMAP1(vcageq_v, aarch64_neon_facge, 0),
4669 NEONMAP1(vcagt_v, aarch64_neon_facgt, 0),
4670 NEONMAP1(vcagtq_v, aarch64_neon_facgt, 0),
4671 NEONMAP1(vcale_v, aarch64_neon_facge, 0),
4672 NEONMAP1(vcaleq_v, aarch64_neon_facge, 0),
4673 NEONMAP1(vcalt_v, aarch64_neon_facgt, 0),
4674 NEONMAP1(vcaltq_v, aarch64_neon_facgt, 0),
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00004675 NEONMAP0(vceqz_v),
4676 NEONMAP0(vceqzq_v),
4677 NEONMAP0(vcgez_v),
4678 NEONMAP0(vcgezq_v),
4679 NEONMAP0(vcgtz_v),
4680 NEONMAP0(vcgtzq_v),
4681 NEONMAP0(vclez_v),
4682 NEONMAP0(vclezq_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004683 NEONMAP1(vcls_v, aarch64_neon_cls, Add1ArgType),
4684 NEONMAP1(vclsq_v, aarch64_neon_cls, Add1ArgType),
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00004685 NEONMAP0(vcltz_v),
4686 NEONMAP0(vcltzq_v),
Tim Northovera2ee4332014-03-29 15:09:45 +00004687 NEONMAP1(vclz_v, ctlz, Add1ArgType),
4688 NEONMAP1(vclzq_v, ctlz, Add1ArgType),
4689 NEONMAP1(vcnt_v, ctpop, Add1ArgType),
4690 NEONMAP1(vcntq_v, ctpop, Add1ArgType),
Ahmed Bougachacd5b8a02015-08-21 23:34:20 +00004691 NEONMAP1(vcvt_f16_f32, aarch64_neon_vcvtfp2hf, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004692 NEONMAP0(vcvt_f16_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004693 NEONMAP1(vcvt_f32_f16, aarch64_neon_vcvthf2fp, 0),
Tim Northovera2ee4332014-03-29 15:09:45 +00004694 NEONMAP0(vcvt_f32_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004695 NEONMAP2(vcvt_n_f16_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004696 NEONMAP2(vcvt_n_f32_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
4697 NEONMAP2(vcvt_n_f64_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004698 NEONMAP1(vcvt_n_s16_v, aarch64_neon_vcvtfp2fxs, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004699 NEONMAP1(vcvt_n_s32_v, aarch64_neon_vcvtfp2fxs, 0),
4700 NEONMAP1(vcvt_n_s64_v, aarch64_neon_vcvtfp2fxs, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004701 NEONMAP1(vcvt_n_u16_v, aarch64_neon_vcvtfp2fxu, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004702 NEONMAP1(vcvt_n_u32_v, aarch64_neon_vcvtfp2fxu, 0),
4703 NEONMAP1(vcvt_n_u64_v, aarch64_neon_vcvtfp2fxu, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004704 NEONMAP0(vcvtq_f16_v),
Tim Northovera2ee4332014-03-29 15:09:45 +00004705 NEONMAP0(vcvtq_f32_v),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004706 NEONMAP2(vcvtq_n_f16_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004707 NEONMAP2(vcvtq_n_f32_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
4708 NEONMAP2(vcvtq_n_f64_v, aarch64_neon_vcvtfxu2fp, aarch64_neon_vcvtfxs2fp, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004709 NEONMAP1(vcvtq_n_s16_v, aarch64_neon_vcvtfp2fxs, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004710 NEONMAP1(vcvtq_n_s32_v, aarch64_neon_vcvtfp2fxs, 0),
4711 NEONMAP1(vcvtq_n_s64_v, aarch64_neon_vcvtfp2fxs, 0),
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00004712 NEONMAP1(vcvtq_n_u16_v, aarch64_neon_vcvtfp2fxu, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004713 NEONMAP1(vcvtq_n_u32_v, aarch64_neon_vcvtfp2fxu, 0),
4714 NEONMAP1(vcvtq_n_u64_v, aarch64_neon_vcvtfp2fxu, 0),
4715 NEONMAP1(vcvtx_f32_v, aarch64_neon_fcvtxn, AddRetType | Add1ArgType),
Oliver Stannard2fcee8b2018-04-27 14:03:32 +00004716 NEONMAP2(vdot_v, aarch64_neon_udot, aarch64_neon_sdot, 0),
4717 NEONMAP2(vdotq_v, aarch64_neon_udot, aarch64_neon_sdot, 0),
Tim Northovera2ee4332014-03-29 15:09:45 +00004718 NEONMAP0(vext_v),
4719 NEONMAP0(vextq_v),
4720 NEONMAP0(vfma_v),
4721 NEONMAP0(vfmaq_v),
Bryan Chan223307b2018-10-25 23:47:00 +00004722 NEONMAP1(vfmlal_high_v, aarch64_neon_fmlal2, 0),
4723 NEONMAP1(vfmlal_low_v, aarch64_neon_fmlal, 0),
4724 NEONMAP1(vfmlalq_high_v, aarch64_neon_fmlal2, 0),
4725 NEONMAP1(vfmlalq_low_v, aarch64_neon_fmlal, 0),
4726 NEONMAP1(vfmlsl_high_v, aarch64_neon_fmlsl2, 0),
4727 NEONMAP1(vfmlsl_low_v, aarch64_neon_fmlsl, 0),
4728 NEONMAP1(vfmlslq_high_v, aarch64_neon_fmlsl2, 0),
4729 NEONMAP1(vfmlslq_low_v, aarch64_neon_fmlsl, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004730 NEONMAP2(vhadd_v, aarch64_neon_uhadd, aarch64_neon_shadd, Add1ArgType | UnsignedAlts),
4731 NEONMAP2(vhaddq_v, aarch64_neon_uhadd, aarch64_neon_shadd, Add1ArgType | UnsignedAlts),
4732 NEONMAP2(vhsub_v, aarch64_neon_uhsub, aarch64_neon_shsub, Add1ArgType | UnsignedAlts),
4733 NEONMAP2(vhsubq_v, aarch64_neon_uhsub, aarch64_neon_shsub, Add1ArgType | UnsignedAlts),
Ivan A. Kosarev9c40c0a2018-06-02 17:42:59 +00004734 NEONMAP1(vld1_x2_v, aarch64_neon_ld1x2, 0),
4735 NEONMAP1(vld1_x3_v, aarch64_neon_ld1x3, 0),
4736 NEONMAP1(vld1_x4_v, aarch64_neon_ld1x4, 0),
4737 NEONMAP1(vld1q_x2_v, aarch64_neon_ld1x2, 0),
4738 NEONMAP1(vld1q_x3_v, aarch64_neon_ld1x3, 0),
4739 NEONMAP1(vld1q_x4_v, aarch64_neon_ld1x4, 0),
Tim Northovera2ee4332014-03-29 15:09:45 +00004740 NEONMAP0(vmovl_v),
4741 NEONMAP0(vmovn_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004742 NEONMAP1(vmul_v, aarch64_neon_pmul, Add1ArgType),
4743 NEONMAP1(vmulq_v, aarch64_neon_pmul, Add1ArgType),
4744 NEONMAP1(vpadd_v, aarch64_neon_addp, Add1ArgType),
4745 NEONMAP2(vpaddl_v, aarch64_neon_uaddlp, aarch64_neon_saddlp, UnsignedAlts),
4746 NEONMAP2(vpaddlq_v, aarch64_neon_uaddlp, aarch64_neon_saddlp, UnsignedAlts),
4747 NEONMAP1(vpaddq_v, aarch64_neon_addp, Add1ArgType),
4748 NEONMAP1(vqabs_v, aarch64_neon_sqabs, Add1ArgType),
4749 NEONMAP1(vqabsq_v, aarch64_neon_sqabs, Add1ArgType),
4750 NEONMAP2(vqadd_v, aarch64_neon_uqadd, aarch64_neon_sqadd, Add1ArgType | UnsignedAlts),
4751 NEONMAP2(vqaddq_v, aarch64_neon_uqadd, aarch64_neon_sqadd, Add1ArgType | UnsignedAlts),
4752 NEONMAP2(vqdmlal_v, aarch64_neon_sqdmull, aarch64_neon_sqadd, 0),
4753 NEONMAP2(vqdmlsl_v, aarch64_neon_sqdmull, aarch64_neon_sqsub, 0),
4754 NEONMAP1(vqdmulh_v, aarch64_neon_sqdmulh, Add1ArgType),
4755 NEONMAP1(vqdmulhq_v, aarch64_neon_sqdmulh, Add1ArgType),
4756 NEONMAP1(vqdmull_v, aarch64_neon_sqdmull, Add1ArgType),
4757 NEONMAP2(vqmovn_v, aarch64_neon_uqxtn, aarch64_neon_sqxtn, Add1ArgType | UnsignedAlts),
4758 NEONMAP1(vqmovun_v, aarch64_neon_sqxtun, Add1ArgType),
4759 NEONMAP1(vqneg_v, aarch64_neon_sqneg, Add1ArgType),
4760 NEONMAP1(vqnegq_v, aarch64_neon_sqneg, Add1ArgType),
4761 NEONMAP1(vqrdmulh_v, aarch64_neon_sqrdmulh, Add1ArgType),
4762 NEONMAP1(vqrdmulhq_v, aarch64_neon_sqrdmulh, Add1ArgType),
4763 NEONMAP2(vqrshl_v, aarch64_neon_uqrshl, aarch64_neon_sqrshl, Add1ArgType | UnsignedAlts),
4764 NEONMAP2(vqrshlq_v, aarch64_neon_uqrshl, aarch64_neon_sqrshl, Add1ArgType | UnsignedAlts),
4765 NEONMAP2(vqshl_n_v, aarch64_neon_uqshl, aarch64_neon_sqshl, UnsignedAlts),
4766 NEONMAP2(vqshl_v, aarch64_neon_uqshl, aarch64_neon_sqshl, Add1ArgType | UnsignedAlts),
4767  NEONMAP2(vqshlq_n_v, aarch64_neon_uqshl, aarch64_neon_sqshl, UnsignedAlts),
4768 NEONMAP2(vqshlq_v, aarch64_neon_uqshl, aarch64_neon_sqshl, Add1ArgType | UnsignedAlts),
Yi Kong1083eb52014-07-29 09:25:17 +00004769 NEONMAP1(vqshlu_n_v, aarch64_neon_sqshlu, 0),
4770 NEONMAP1(vqshluq_n_v, aarch64_neon_sqshlu, 0),
Tim Northover573cbee2014-05-24 12:52:07 +00004771 NEONMAP2(vqsub_v, aarch64_neon_uqsub, aarch64_neon_sqsub, Add1ArgType | UnsignedAlts),
4772 NEONMAP2(vqsubq_v, aarch64_neon_uqsub, aarch64_neon_sqsub, Add1ArgType | UnsignedAlts),
4773 NEONMAP1(vraddhn_v, aarch64_neon_raddhn, Add1ArgType),
4774 NEONMAP2(vrecpe_v, aarch64_neon_frecpe, aarch64_neon_urecpe, 0),
4775 NEONMAP2(vrecpeq_v, aarch64_neon_frecpe, aarch64_neon_urecpe, 0),
4776 NEONMAP1(vrecps_v, aarch64_neon_frecps, Add1ArgType),
4777 NEONMAP1(vrecpsq_v, aarch64_neon_frecps, Add1ArgType),
4778 NEONMAP2(vrhadd_v, aarch64_neon_urhadd, aarch64_neon_srhadd, Add1ArgType | UnsignedAlts),
4779 NEONMAP2(vrhaddq_v, aarch64_neon_urhadd, aarch64_neon_srhadd, Add1ArgType | UnsignedAlts),
Ivan A. Kosarev8264bb82018-07-23 13:26:37 +00004780 NEONMAP0(vrndi_v),
4781 NEONMAP0(vrndiq_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004782 NEONMAP2(vrshl_v, aarch64_neon_urshl, aarch64_neon_srshl, Add1ArgType | UnsignedAlts),
4783 NEONMAP2(vrshlq_v, aarch64_neon_urshl, aarch64_neon_srshl, Add1ArgType | UnsignedAlts),
Yi Kong1083eb52014-07-29 09:25:17 +00004784 NEONMAP2(vrshr_n_v, aarch64_neon_urshl, aarch64_neon_srshl, UnsignedAlts),
4785 NEONMAP2(vrshrq_n_v, aarch64_neon_urshl, aarch64_neon_srshl, UnsignedAlts),
Tim Northover573cbee2014-05-24 12:52:07 +00004786 NEONMAP2(vrsqrte_v, aarch64_neon_frsqrte, aarch64_neon_ursqrte, 0),
4787 NEONMAP2(vrsqrteq_v, aarch64_neon_frsqrte, aarch64_neon_ursqrte, 0),
4788 NEONMAP1(vrsqrts_v, aarch64_neon_frsqrts, Add1ArgType),
4789 NEONMAP1(vrsqrtsq_v, aarch64_neon_frsqrts, Add1ArgType),
4790 NEONMAP1(vrsubhn_v, aarch64_neon_rsubhn, Add1ArgType),
4791 NEONMAP1(vsha1su0q_v, aarch64_crypto_sha1su0, 0),
4792 NEONMAP1(vsha1su1q_v, aarch64_crypto_sha1su1, 0),
4793 NEONMAP1(vsha256h2q_v, aarch64_crypto_sha256h2, 0),
4794 NEONMAP1(vsha256hq_v, aarch64_crypto_sha256h, 0),
4795 NEONMAP1(vsha256su0q_v, aarch64_crypto_sha256su0, 0),
4796 NEONMAP1(vsha256su1q_v, aarch64_crypto_sha256su1, 0),
Tim Northovera2ee4332014-03-29 15:09:45 +00004797 NEONMAP0(vshl_n_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004798 NEONMAP2(vshl_v, aarch64_neon_ushl, aarch64_neon_sshl, Add1ArgType | UnsignedAlts),
Tim Northovera2ee4332014-03-29 15:09:45 +00004799 NEONMAP0(vshll_n_v),
4800 NEONMAP0(vshlq_n_v),
Tim Northover573cbee2014-05-24 12:52:07 +00004801 NEONMAP2(vshlq_v, aarch64_neon_ushl, aarch64_neon_sshl, Add1ArgType | UnsignedAlts),
Tim Northovera2ee4332014-03-29 15:09:45 +00004802 NEONMAP0(vshr_n_v),
4803 NEONMAP0(vshrn_n_v),
4804 NEONMAP0(vshrq_n_v),
Ivan A. Kosarev73c76c32018-06-10 09:28:10 +00004805 NEONMAP1(vst1_x2_v, aarch64_neon_st1x2, 0),
4806 NEONMAP1(vst1_x3_v, aarch64_neon_st1x3, 0),
4807 NEONMAP1(vst1_x4_v, aarch64_neon_st1x4, 0),
4808 NEONMAP1(vst1q_x2_v, aarch64_neon_st1x2, 0),
4809 NEONMAP1(vst1q_x3_v, aarch64_neon_st1x3, 0),
4810 NEONMAP1(vst1q_x4_v, aarch64_neon_st1x4, 0),
Tim Northovera2ee4332014-03-29 15:09:45 +00004811 NEONMAP0(vsubhn_v),
4812 NEONMAP0(vtst_v),
4813 NEONMAP0(vtstq_v),
4814};
4815
Craig Topper273dbc62015-10-18 05:29:26 +00004816static const NeonIntrinsicInfo AArch64SISDIntrinsicMap[] = {
Tim Northover573cbee2014-05-24 12:52:07 +00004817 NEONMAP1(vabdd_f64, aarch64_sisd_fabd, Add1ArgType),
4818 NEONMAP1(vabds_f32, aarch64_sisd_fabd, Add1ArgType),
4819 NEONMAP1(vabsd_s64, aarch64_neon_abs, Add1ArgType),
4820 NEONMAP1(vaddlv_s32, aarch64_neon_saddlv, AddRetType | Add1ArgType),
4821 NEONMAP1(vaddlv_u32, aarch64_neon_uaddlv, AddRetType | Add1ArgType),
4822 NEONMAP1(vaddlvq_s32, aarch64_neon_saddlv, AddRetType | Add1ArgType),
4823 NEONMAP1(vaddlvq_u32, aarch64_neon_uaddlv, AddRetType | Add1ArgType),
4824 NEONMAP1(vaddv_f32, aarch64_neon_faddv, AddRetType | Add1ArgType),
4825 NEONMAP1(vaddv_s32, aarch64_neon_saddv, AddRetType | Add1ArgType),
4826 NEONMAP1(vaddv_u32, aarch64_neon_uaddv, AddRetType | Add1ArgType),
4827 NEONMAP1(vaddvq_f32, aarch64_neon_faddv, AddRetType | Add1ArgType),
4828 NEONMAP1(vaddvq_f64, aarch64_neon_faddv, AddRetType | Add1ArgType),
4829 NEONMAP1(vaddvq_s32, aarch64_neon_saddv, AddRetType | Add1ArgType),
4830 NEONMAP1(vaddvq_s64, aarch64_neon_saddv, AddRetType | Add1ArgType),
4831 NEONMAP1(vaddvq_u32, aarch64_neon_uaddv, AddRetType | Add1ArgType),
4832 NEONMAP1(vaddvq_u64, aarch64_neon_uaddv, AddRetType | Add1ArgType),
4833 NEONMAP1(vcaged_f64, aarch64_neon_facge, AddRetType | Add1ArgType),
4834 NEONMAP1(vcages_f32, aarch64_neon_facge, AddRetType | Add1ArgType),
4835 NEONMAP1(vcagtd_f64, aarch64_neon_facgt, AddRetType | Add1ArgType),
4836 NEONMAP1(vcagts_f32, aarch64_neon_facgt, AddRetType | Add1ArgType),
4837 NEONMAP1(vcaled_f64, aarch64_neon_facge, AddRetType | Add1ArgType),
4838 NEONMAP1(vcales_f32, aarch64_neon_facge, AddRetType | Add1ArgType),
4839 NEONMAP1(vcaltd_f64, aarch64_neon_facgt, AddRetType | Add1ArgType),
4840 NEONMAP1(vcalts_f32, aarch64_neon_facgt, AddRetType | Add1ArgType),
4841 NEONMAP1(vcvtad_s64_f64, aarch64_neon_fcvtas, AddRetType | Add1ArgType),
4842 NEONMAP1(vcvtad_u64_f64, aarch64_neon_fcvtau, AddRetType | Add1ArgType),
4843 NEONMAP1(vcvtas_s32_f32, aarch64_neon_fcvtas, AddRetType | Add1ArgType),
4844 NEONMAP1(vcvtas_u32_f32, aarch64_neon_fcvtau, AddRetType | Add1ArgType),
4845 NEONMAP1(vcvtd_n_f64_s64, aarch64_neon_vcvtfxs2fp, AddRetType | Add1ArgType),
4846 NEONMAP1(vcvtd_n_f64_u64, aarch64_neon_vcvtfxu2fp, AddRetType | Add1ArgType),
4847 NEONMAP1(vcvtd_n_s64_f64, aarch64_neon_vcvtfp2fxs, AddRetType | Add1ArgType),
4848 NEONMAP1(vcvtd_n_u64_f64, aarch64_neon_vcvtfp2fxu, AddRetType | Add1ArgType),
4849 NEONMAP1(vcvtmd_s64_f64, aarch64_neon_fcvtms, AddRetType | Add1ArgType),
4850 NEONMAP1(vcvtmd_u64_f64, aarch64_neon_fcvtmu, AddRetType | Add1ArgType),
4851 NEONMAP1(vcvtms_s32_f32, aarch64_neon_fcvtms, AddRetType | Add1ArgType),
4852 NEONMAP1(vcvtms_u32_f32, aarch64_neon_fcvtmu, AddRetType | Add1ArgType),
4853 NEONMAP1(vcvtnd_s64_f64, aarch64_neon_fcvtns, AddRetType | Add1ArgType),
4854 NEONMAP1(vcvtnd_u64_f64, aarch64_neon_fcvtnu, AddRetType | Add1ArgType),
4855 NEONMAP1(vcvtns_s32_f32, aarch64_neon_fcvtns, AddRetType | Add1ArgType),
4856 NEONMAP1(vcvtns_u32_f32, aarch64_neon_fcvtnu, AddRetType | Add1ArgType),
4857 NEONMAP1(vcvtpd_s64_f64, aarch64_neon_fcvtps, AddRetType | Add1ArgType),
4858 NEONMAP1(vcvtpd_u64_f64, aarch64_neon_fcvtpu, AddRetType | Add1ArgType),
4859 NEONMAP1(vcvtps_s32_f32, aarch64_neon_fcvtps, AddRetType | Add1ArgType),
4860 NEONMAP1(vcvtps_u32_f32, aarch64_neon_fcvtpu, AddRetType | Add1ArgType),
4861 NEONMAP1(vcvts_n_f32_s32, aarch64_neon_vcvtfxs2fp, AddRetType | Add1ArgType),
4862 NEONMAP1(vcvts_n_f32_u32, aarch64_neon_vcvtfxu2fp, AddRetType | Add1ArgType),
4863 NEONMAP1(vcvts_n_s32_f32, aarch64_neon_vcvtfp2fxs, AddRetType | Add1ArgType),
4864 NEONMAP1(vcvts_n_u32_f32, aarch64_neon_vcvtfp2fxu, AddRetType | Add1ArgType),
4865 NEONMAP1(vcvtxd_f32_f64, aarch64_sisd_fcvtxn, 0),
4866 NEONMAP1(vmaxnmv_f32, aarch64_neon_fmaxnmv, AddRetType | Add1ArgType),
4867 NEONMAP1(vmaxnmvq_f32, aarch64_neon_fmaxnmv, AddRetType | Add1ArgType),
4868 NEONMAP1(vmaxnmvq_f64, aarch64_neon_fmaxnmv, AddRetType | Add1ArgType),
4869 NEONMAP1(vmaxv_f32, aarch64_neon_fmaxv, AddRetType | Add1ArgType),
4870 NEONMAP1(vmaxv_s32, aarch64_neon_smaxv, AddRetType | Add1ArgType),
4871 NEONMAP1(vmaxv_u32, aarch64_neon_umaxv, AddRetType | Add1ArgType),
4872 NEONMAP1(vmaxvq_f32, aarch64_neon_fmaxv, AddRetType | Add1ArgType),
4873 NEONMAP1(vmaxvq_f64, aarch64_neon_fmaxv, AddRetType | Add1ArgType),
4874 NEONMAP1(vmaxvq_s32, aarch64_neon_smaxv, AddRetType | Add1ArgType),
4875 NEONMAP1(vmaxvq_u32, aarch64_neon_umaxv, AddRetType | Add1ArgType),
4876 NEONMAP1(vminnmv_f32, aarch64_neon_fminnmv, AddRetType | Add1ArgType),
4877 NEONMAP1(vminnmvq_f32, aarch64_neon_fminnmv, AddRetType | Add1ArgType),
4878 NEONMAP1(vminnmvq_f64, aarch64_neon_fminnmv, AddRetType | Add1ArgType),
4879 NEONMAP1(vminv_f32, aarch64_neon_fminv, AddRetType | Add1ArgType),
4880 NEONMAP1(vminv_s32, aarch64_neon_sminv, AddRetType | Add1ArgType),
4881 NEONMAP1(vminv_u32, aarch64_neon_uminv, AddRetType | Add1ArgType),
4882 NEONMAP1(vminvq_f32, aarch64_neon_fminv, AddRetType | Add1ArgType),
4883 NEONMAP1(vminvq_f64, aarch64_neon_fminv, AddRetType | Add1ArgType),
4884 NEONMAP1(vminvq_s32, aarch64_neon_sminv, AddRetType | Add1ArgType),
4885 NEONMAP1(vminvq_u32, aarch64_neon_uminv, AddRetType | Add1ArgType),
4886 NEONMAP1(vmull_p64, aarch64_neon_pmull64, 0),
4887 NEONMAP1(vmulxd_f64, aarch64_neon_fmulx, Add1ArgType),
4888 NEONMAP1(vmulxs_f32, aarch64_neon_fmulx, Add1ArgType),
4889 NEONMAP1(vpaddd_s64, aarch64_neon_uaddv, AddRetType | Add1ArgType),
4890 NEONMAP1(vpaddd_u64, aarch64_neon_uaddv, AddRetType | Add1ArgType),
4891 NEONMAP1(vpmaxnmqd_f64, aarch64_neon_fmaxnmv, AddRetType | Add1ArgType),
4892 NEONMAP1(vpmaxnms_f32, aarch64_neon_fmaxnmv, AddRetType | Add1ArgType),
4893 NEONMAP1(vpmaxqd_f64, aarch64_neon_fmaxv, AddRetType | Add1ArgType),
4894 NEONMAP1(vpmaxs_f32, aarch64_neon_fmaxv, AddRetType | Add1ArgType),
4895 NEONMAP1(vpminnmqd_f64, aarch64_neon_fminnmv, AddRetType | Add1ArgType),
4896 NEONMAP1(vpminnms_f32, aarch64_neon_fminnmv, AddRetType | Add1ArgType),
4897 NEONMAP1(vpminqd_f64, aarch64_neon_fminv, AddRetType | Add1ArgType),
4898 NEONMAP1(vpmins_f32, aarch64_neon_fminv, AddRetType | Add1ArgType),
4899 NEONMAP1(vqabsb_s8, aarch64_neon_sqabs, Vectorize1ArgType | Use64BitVectors),
4900 NEONMAP1(vqabsd_s64, aarch64_neon_sqabs, Add1ArgType),
4901 NEONMAP1(vqabsh_s16, aarch64_neon_sqabs, Vectorize1ArgType | Use64BitVectors),
4902 NEONMAP1(vqabss_s32, aarch64_neon_sqabs, Add1ArgType),
4903 NEONMAP1(vqaddb_s8, aarch64_neon_sqadd, Vectorize1ArgType | Use64BitVectors),
4904 NEONMAP1(vqaddb_u8, aarch64_neon_uqadd, Vectorize1ArgType | Use64BitVectors),
4905 NEONMAP1(vqaddd_s64, aarch64_neon_sqadd, Add1ArgType),
4906 NEONMAP1(vqaddd_u64, aarch64_neon_uqadd, Add1ArgType),
4907 NEONMAP1(vqaddh_s16, aarch64_neon_sqadd, Vectorize1ArgType | Use64BitVectors),
4908 NEONMAP1(vqaddh_u16, aarch64_neon_uqadd, Vectorize1ArgType | Use64BitVectors),
4909 NEONMAP1(vqadds_s32, aarch64_neon_sqadd, Add1ArgType),
4910 NEONMAP1(vqadds_u32, aarch64_neon_uqadd, Add1ArgType),
4911 NEONMAP1(vqdmulhh_s16, aarch64_neon_sqdmulh, Vectorize1ArgType | Use64BitVectors),
4912 NEONMAP1(vqdmulhs_s32, aarch64_neon_sqdmulh, Add1ArgType),
4913 NEONMAP1(vqdmullh_s16, aarch64_neon_sqdmull, VectorRet | Use128BitVectors),
4914 NEONMAP1(vqdmulls_s32, aarch64_neon_sqdmulls_scalar, 0),
4915 NEONMAP1(vqmovnd_s64, aarch64_neon_scalar_sqxtn, AddRetType | Add1ArgType),
4916 NEONMAP1(vqmovnd_u64, aarch64_neon_scalar_uqxtn, AddRetType | Add1ArgType),
4917 NEONMAP1(vqmovnh_s16, aarch64_neon_sqxtn, VectorRet | Use64BitVectors),
4918 NEONMAP1(vqmovnh_u16, aarch64_neon_uqxtn, VectorRet | Use64BitVectors),
4919 NEONMAP1(vqmovns_s32, aarch64_neon_sqxtn, VectorRet | Use64BitVectors),
4920 NEONMAP1(vqmovns_u32, aarch64_neon_uqxtn, VectorRet | Use64BitVectors),
4921 NEONMAP1(vqmovund_s64, aarch64_neon_scalar_sqxtun, AddRetType | Add1ArgType),
4922 NEONMAP1(vqmovunh_s16, aarch64_neon_sqxtun, VectorRet | Use64BitVectors),
4923 NEONMAP1(vqmovuns_s32, aarch64_neon_sqxtun, VectorRet | Use64BitVectors),
4924 NEONMAP1(vqnegb_s8, aarch64_neon_sqneg, Vectorize1ArgType | Use64BitVectors),
4925 NEONMAP1(vqnegd_s64, aarch64_neon_sqneg, Add1ArgType),
4926 NEONMAP1(vqnegh_s16, aarch64_neon_sqneg, Vectorize1ArgType | Use64BitVectors),
4927 NEONMAP1(vqnegs_s32, aarch64_neon_sqneg, Add1ArgType),
4928 NEONMAP1(vqrdmulhh_s16, aarch64_neon_sqrdmulh, Vectorize1ArgType | Use64BitVectors),
4929 NEONMAP1(vqrdmulhs_s32, aarch64_neon_sqrdmulh, Add1ArgType),
4930 NEONMAP1(vqrshlb_s8, aarch64_neon_sqrshl, Vectorize1ArgType | Use64BitVectors),
4931 NEONMAP1(vqrshlb_u8, aarch64_neon_uqrshl, Vectorize1ArgType | Use64BitVectors),
4932 NEONMAP1(vqrshld_s64, aarch64_neon_sqrshl, Add1ArgType),
4933 NEONMAP1(vqrshld_u64, aarch64_neon_uqrshl, Add1ArgType),
4934 NEONMAP1(vqrshlh_s16, aarch64_neon_sqrshl, Vectorize1ArgType | Use64BitVectors),
4935 NEONMAP1(vqrshlh_u16, aarch64_neon_uqrshl, Vectorize1ArgType | Use64BitVectors),
4936 NEONMAP1(vqrshls_s32, aarch64_neon_sqrshl, Add1ArgType),
4937 NEONMAP1(vqrshls_u32, aarch64_neon_uqrshl, Add1ArgType),
4938 NEONMAP1(vqrshrnd_n_s64, aarch64_neon_sqrshrn, AddRetType),
4939 NEONMAP1(vqrshrnd_n_u64, aarch64_neon_uqrshrn, AddRetType),
4940 NEONMAP1(vqrshrnh_n_s16, aarch64_neon_sqrshrn, VectorRet | Use64BitVectors),
4941 NEONMAP1(vqrshrnh_n_u16, aarch64_neon_uqrshrn, VectorRet | Use64BitVectors),
4942 NEONMAP1(vqrshrns_n_s32, aarch64_neon_sqrshrn, VectorRet | Use64BitVectors),
4943 NEONMAP1(vqrshrns_n_u32, aarch64_neon_uqrshrn, VectorRet | Use64BitVectors),
4944 NEONMAP1(vqrshrund_n_s64, aarch64_neon_sqrshrun, AddRetType),
4945 NEONMAP1(vqrshrunh_n_s16, aarch64_neon_sqrshrun, VectorRet | Use64BitVectors),
4946 NEONMAP1(vqrshruns_n_s32, aarch64_neon_sqrshrun, VectorRet | Use64BitVectors),
4947 NEONMAP1(vqshlb_n_s8, aarch64_neon_sqshl, Vectorize1ArgType | Use64BitVectors),
4948 NEONMAP1(vqshlb_n_u8, aarch64_neon_uqshl, Vectorize1ArgType | Use64BitVectors),
4949 NEONMAP1(vqshlb_s8, aarch64_neon_sqshl, Vectorize1ArgType | Use64BitVectors),
4950 NEONMAP1(vqshlb_u8, aarch64_neon_uqshl, Vectorize1ArgType | Use64BitVectors),
4951 NEONMAP1(vqshld_s64, aarch64_neon_sqshl, Add1ArgType),
4952 NEONMAP1(vqshld_u64, aarch64_neon_uqshl, Add1ArgType),
4953 NEONMAP1(vqshlh_n_s16, aarch64_neon_sqshl, Vectorize1ArgType | Use64BitVectors),
4954 NEONMAP1(vqshlh_n_u16, aarch64_neon_uqshl, Vectorize1ArgType | Use64BitVectors),
4955 NEONMAP1(vqshlh_s16, aarch64_neon_sqshl, Vectorize1ArgType | Use64BitVectors),
4956 NEONMAP1(vqshlh_u16, aarch64_neon_uqshl, Vectorize1ArgType | Use64BitVectors),
4957 NEONMAP1(vqshls_n_s32, aarch64_neon_sqshl, Add1ArgType),
4958 NEONMAP1(vqshls_n_u32, aarch64_neon_uqshl, Add1ArgType),
4959 NEONMAP1(vqshls_s32, aarch64_neon_sqshl, Add1ArgType),
4960 NEONMAP1(vqshls_u32, aarch64_neon_uqshl, Add1ArgType),
4961 NEONMAP1(vqshlub_n_s8, aarch64_neon_sqshlu, Vectorize1ArgType | Use64BitVectors),
4962 NEONMAP1(vqshluh_n_s16, aarch64_neon_sqshlu, Vectorize1ArgType | Use64BitVectors),
4963 NEONMAP1(vqshlus_n_s32, aarch64_neon_sqshlu, Add1ArgType),
4964 NEONMAP1(vqshrnd_n_s64, aarch64_neon_sqshrn, AddRetType),
4965 NEONMAP1(vqshrnd_n_u64, aarch64_neon_uqshrn, AddRetType),
4966 NEONMAP1(vqshrnh_n_s16, aarch64_neon_sqshrn, VectorRet | Use64BitVectors),
4967 NEONMAP1(vqshrnh_n_u16, aarch64_neon_uqshrn, VectorRet | Use64BitVectors),
4968 NEONMAP1(vqshrns_n_s32, aarch64_neon_sqshrn, VectorRet | Use64BitVectors),
4969 NEONMAP1(vqshrns_n_u32, aarch64_neon_uqshrn, VectorRet | Use64BitVectors),
4970 NEONMAP1(vqshrund_n_s64, aarch64_neon_sqshrun, AddRetType),
4971 NEONMAP1(vqshrunh_n_s16, aarch64_neon_sqshrun, VectorRet | Use64BitVectors),
4972 NEONMAP1(vqshruns_n_s32, aarch64_neon_sqshrun, VectorRet | Use64BitVectors),
4973 NEONMAP1(vqsubb_s8, aarch64_neon_sqsub, Vectorize1ArgType | Use64BitVectors),
4974 NEONMAP1(vqsubb_u8, aarch64_neon_uqsub, Vectorize1ArgType | Use64BitVectors),
4975 NEONMAP1(vqsubd_s64, aarch64_neon_sqsub, Add1ArgType),
4976 NEONMAP1(vqsubd_u64, aarch64_neon_uqsub, Add1ArgType),
4977 NEONMAP1(vqsubh_s16, aarch64_neon_sqsub, Vectorize1ArgType | Use64BitVectors),
4978 NEONMAP1(vqsubh_u16, aarch64_neon_uqsub, Vectorize1ArgType | Use64BitVectors),
4979 NEONMAP1(vqsubs_s32, aarch64_neon_sqsub, Add1ArgType),
4980 NEONMAP1(vqsubs_u32, aarch64_neon_uqsub, Add1ArgType),
4981 NEONMAP1(vrecped_f64, aarch64_neon_frecpe, Add1ArgType),
4982 NEONMAP1(vrecpes_f32, aarch64_neon_frecpe, Add1ArgType),
4983 NEONMAP1(vrecpxd_f64, aarch64_neon_frecpx, Add1ArgType),
4984 NEONMAP1(vrecpxs_f32, aarch64_neon_frecpx, Add1ArgType),
4985 NEONMAP1(vrshld_s64, aarch64_neon_srshl, Add1ArgType),
4986 NEONMAP1(vrshld_u64, aarch64_neon_urshl, Add1ArgType),
4987 NEONMAP1(vrsqrted_f64, aarch64_neon_frsqrte, Add1ArgType),
4988 NEONMAP1(vrsqrtes_f32, aarch64_neon_frsqrte, Add1ArgType),
4989 NEONMAP1(vrsqrtsd_f64, aarch64_neon_frsqrts, Add1ArgType),
4990 NEONMAP1(vrsqrtss_f32, aarch64_neon_frsqrts, Add1ArgType),
4991 NEONMAP1(vsha1cq_u32, aarch64_crypto_sha1c, 0),
4992 NEONMAP1(vsha1h_u32, aarch64_crypto_sha1h, 0),
4993 NEONMAP1(vsha1mq_u32, aarch64_crypto_sha1m, 0),
4994 NEONMAP1(vsha1pq_u32, aarch64_crypto_sha1p, 0),
4995 NEONMAP1(vshld_s64, aarch64_neon_sshl, Add1ArgType),
4996 NEONMAP1(vshld_u64, aarch64_neon_ushl, Add1ArgType),
4997 NEONMAP1(vslid_n_s64, aarch64_neon_vsli, Vectorize1ArgType),
4998 NEONMAP1(vslid_n_u64, aarch64_neon_vsli, Vectorize1ArgType),
4999 NEONMAP1(vsqaddb_u8, aarch64_neon_usqadd, Vectorize1ArgType | Use64BitVectors),
5000 NEONMAP1(vsqaddd_u64, aarch64_neon_usqadd, Add1ArgType),
5001 NEONMAP1(vsqaddh_u16, aarch64_neon_usqadd, Vectorize1ArgType | Use64BitVectors),
5002 NEONMAP1(vsqadds_u32, aarch64_neon_usqadd, Add1ArgType),
5003 NEONMAP1(vsrid_n_s64, aarch64_neon_vsri, Vectorize1ArgType),
5004 NEONMAP1(vsrid_n_u64, aarch64_neon_vsri, Vectorize1ArgType),
5005 NEONMAP1(vuqaddb_s8, aarch64_neon_suqadd, Vectorize1ArgType | Use64BitVectors),
5006 NEONMAP1(vuqaddd_s64, aarch64_neon_suqadd, Add1ArgType),
5007 NEONMAP1(vuqaddh_s16, aarch64_neon_suqadd, Vectorize1ArgType | Use64BitVectors),
5008 NEONMAP1(vuqadds_s32, aarch64_neon_suqadd, Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005009  // FP16 scalar intrinsics go here.
5010 NEONMAP1(vabdh_f16, aarch64_sisd_fabd, Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005011 NEONMAP1(vcvtah_s32_f16, aarch64_neon_fcvtas, AddRetType | Add1ArgType),
5012 NEONMAP1(vcvtah_s64_f16, aarch64_neon_fcvtas, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005013 NEONMAP1(vcvtah_u32_f16, aarch64_neon_fcvtau, AddRetType | Add1ArgType),
5014 NEONMAP1(vcvtah_u64_f16, aarch64_neon_fcvtau, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005015 NEONMAP1(vcvth_n_f16_s32, aarch64_neon_vcvtfxs2fp, AddRetType | Add1ArgType),
5016 NEONMAP1(vcvth_n_f16_s64, aarch64_neon_vcvtfxs2fp, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005017 NEONMAP1(vcvth_n_f16_u32, aarch64_neon_vcvtfxu2fp, AddRetType | Add1ArgType),
5018 NEONMAP1(vcvth_n_f16_u64, aarch64_neon_vcvtfxu2fp, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005019 NEONMAP1(vcvth_n_s32_f16, aarch64_neon_vcvtfp2fxs, AddRetType | Add1ArgType),
5020 NEONMAP1(vcvth_n_s64_f16, aarch64_neon_vcvtfp2fxs, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005021 NEONMAP1(vcvth_n_u32_f16, aarch64_neon_vcvtfp2fxu, AddRetType | Add1ArgType),
5022 NEONMAP1(vcvth_n_u64_f16, aarch64_neon_vcvtfp2fxu, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005023 NEONMAP1(vcvtmh_s32_f16, aarch64_neon_fcvtms, AddRetType | Add1ArgType),
5024 NEONMAP1(vcvtmh_s64_f16, aarch64_neon_fcvtms, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005025 NEONMAP1(vcvtmh_u32_f16, aarch64_neon_fcvtmu, AddRetType | Add1ArgType),
5026 NEONMAP1(vcvtmh_u64_f16, aarch64_neon_fcvtmu, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005027 NEONMAP1(vcvtnh_s32_f16, aarch64_neon_fcvtns, AddRetType | Add1ArgType),
5028 NEONMAP1(vcvtnh_s64_f16, aarch64_neon_fcvtns, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005029 NEONMAP1(vcvtnh_u32_f16, aarch64_neon_fcvtnu, AddRetType | Add1ArgType),
5030 NEONMAP1(vcvtnh_u64_f16, aarch64_neon_fcvtnu, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005031 NEONMAP1(vcvtph_s32_f16, aarch64_neon_fcvtps, AddRetType | Add1ArgType),
5032 NEONMAP1(vcvtph_s64_f16, aarch64_neon_fcvtps, AddRetType | Add1ArgType),
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00005033 NEONMAP1(vcvtph_u32_f16, aarch64_neon_fcvtpu, AddRetType | Add1ArgType),
5034 NEONMAP1(vcvtph_u64_f16, aarch64_neon_fcvtpu, AddRetType | Add1ArgType),
5035 NEONMAP1(vmulxh_f16, aarch64_neon_fmulx, Add1ArgType),
5036 NEONMAP1(vrecpeh_f16, aarch64_neon_frecpe, Add1ArgType),
5037 NEONMAP1(vrecpxh_f16, aarch64_neon_frecpx, Add1ArgType),
5038 NEONMAP1(vrsqrteh_f16, aarch64_neon_frsqrte, Add1ArgType),
5039 NEONMAP1(vrsqrtsh_f16, aarch64_neon_frsqrts, Add1ArgType),
Tim Northovera2ee4332014-03-29 15:09:45 +00005040};
5041
Tim Northover8fe03d62014-02-21 11:57:24 +00005042#undef NEONMAP0
5043#undef NEONMAP1
5044#undef NEONMAP2
5045
5046static bool NEONSIMDIntrinsicsProvenSorted = false;
Tim Northover8fe03d62014-02-21 11:57:24 +00005047
Tim Northover573cbee2014-05-24 12:52:07 +00005048static bool AArch64SIMDIntrinsicsProvenSorted = false;
5049static bool AArch64SISDIntrinsicsProvenSorted = false;
Tim Northovera2ee4332014-03-29 15:09:45 +00005050
5051
Tim Northover8fe03d62014-02-21 11:57:24 +00005052static const NeonIntrinsicInfo *
Craig Topper00bbdcf2014-06-28 23:22:23 +00005053findNeonIntrinsicInMap(ArrayRef<NeonIntrinsicInfo> IntrinsicMap,
Tim Northover8fe03d62014-02-21 11:57:24 +00005054 unsigned BuiltinID, bool &MapProvenSorted) {
Tim Northoverdb3e5e22014-02-19 11:55:06 +00005055
5056#ifndef NDEBUG
Tim Northover8fe03d62014-02-21 11:57:24 +00005057 if (!MapProvenSorted) {
Eric Christophered60b432015-11-11 02:04:08 +00005058 assert(std::is_sorted(std::begin(IntrinsicMap), std::end(IntrinsicMap)));
Tim Northover8fe03d62014-02-21 11:57:24 +00005059 MapProvenSorted = true;
5060 }
Tim Northoverdb3e5e22014-02-19 11:55:06 +00005061#endif
5062
Tim Northover8fe03d62014-02-21 11:57:24 +00005063 const NeonIntrinsicInfo *Builtin =
5064 std::lower_bound(IntrinsicMap.begin(), IntrinsicMap.end(), BuiltinID);
5065
5066 if (Builtin != IntrinsicMap.end() && Builtin->BuiltinID == BuiltinID)
5067 return Builtin;
5068
Craig Topper8a13c412014-05-21 05:09:00 +00005069 return nullptr;
Tim Northover8fe03d62014-02-21 11:57:24 +00005070}
5071
5072Function *CodeGenFunction::LookupNeonLLVMIntrinsic(unsigned IntrinsicID,
5073 unsigned Modifier,
5074 llvm::Type *ArgType,
5075 const CallExpr *E) {
Tim Northovera2ee4332014-03-29 15:09:45 +00005076 int VectorSize = 0;
5077 if (Modifier & Use64BitVectors)
5078 VectorSize = 64;
5079 else if (Modifier & Use128BitVectors)
5080 VectorSize = 128;
5081
Tim Northover2d837962014-02-21 11:57:20 +00005082 // Return type.
5083 SmallVector<llvm::Type *, 3> Tys;
5084 if (Modifier & AddRetType) {
David Majnemerced8bdf2015-02-25 17:36:15 +00005085 llvm::Type *Ty = ConvertType(E->getCallReturnType(getContext()));
Tim Northover2d837962014-02-21 11:57:20 +00005086 if (Modifier & VectorizeRetType)
Tim Northovera2ee4332014-03-29 15:09:45 +00005087 Ty = llvm::VectorType::get(
5088 Ty, VectorSize ? VectorSize / Ty->getPrimitiveSizeInBits() : 1);
Tim Northover2d837962014-02-21 11:57:20 +00005089
5090 Tys.push_back(Ty);
5091 }
5092
5093 // Arguments.
Tim Northovera2ee4332014-03-29 15:09:45 +00005094 if (Modifier & VectorizeArgTypes) {
5095 int Elts = VectorSize ? VectorSize / ArgType->getPrimitiveSizeInBits() : 1;
5096 ArgType = llvm::VectorType::get(ArgType, Elts);
5097 }
Tim Northover2d837962014-02-21 11:57:20 +00005098
5099 if (Modifier & (Add1ArgType | Add2ArgTypes))
5100 Tys.push_back(ArgType);
5101
5102 if (Modifier & Add2ArgTypes)
5103 Tys.push_back(ArgType);
5104
5105 if (Modifier & InventFloatType)
5106 Tys.push_back(FloatTy);
5107
5108 return CGM.getIntrinsic(IntrinsicID, Tys);
5109}
5110
Tim Northovera2ee4332014-03-29 15:09:45 +00005111static Value *EmitCommonNeonSISDBuiltinExpr(CodeGenFunction &CGF,
5112 const NeonIntrinsicInfo &SISDInfo,
5113 SmallVectorImpl<Value *> &Ops,
5114 const CallExpr *E) {
Tim Northover0c68faa2014-03-31 15:47:09 +00005115 unsigned BuiltinID = SISDInfo.BuiltinID;
Tim Northovera2ee4332014-03-29 15:09:45 +00005116 unsigned int Int = SISDInfo.LLVMIntrinsic;
5117 unsigned Modifier = SISDInfo.TypeModifier;
5118 const char *s = SISDInfo.NameHint;
5119
Tim Northover0c68faa2014-03-31 15:47:09 +00005120 switch (BuiltinID) {
5121 case NEON::BI__builtin_neon_vcled_s64:
5122 case NEON::BI__builtin_neon_vcled_u64:
5123 case NEON::BI__builtin_neon_vcles_f32:
5124 case NEON::BI__builtin_neon_vcled_f64:
5125 case NEON::BI__builtin_neon_vcltd_s64:
5126 case NEON::BI__builtin_neon_vcltd_u64:
5127 case NEON::BI__builtin_neon_vclts_f32:
5128 case NEON::BI__builtin_neon_vcltd_f64:
5129 case NEON::BI__builtin_neon_vcales_f32:
5130 case NEON::BI__builtin_neon_vcaled_f64:
5131 case NEON::BI__builtin_neon_vcalts_f32:
5132 case NEON::BI__builtin_neon_vcaltd_f64:
 5133 // Only one direction of comparisons actually exists: cmle is really a cmge
 5134 // with swapped operands. The table gives us the right intrinsic, but we
 5135 // still need to do the swap.
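    // Illustrative example: vcled_s64(a, b) is lowered via the mapped
    // "cmge"-style intrinsic with its operands reversed, i.e. as if the call
    // had been vcged_s64(b, a).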
5136 std::swap(Ops[0], Ops[1]);
5137 break;
5138 }
5139
Tim Northovera2ee4332014-03-29 15:09:45 +00005140 assert(Int && "Generic code assumes a valid intrinsic");
5141
5142 // Determine the type(s) of this overloaded AArch64 intrinsic.
5143 const Expr *Arg = E->getArg(0);
5144 llvm::Type *ArgTy = CGF.ConvertType(Arg->getType());
5145 Function *F = CGF.LookupNeonLLVMIntrinsic(Int, Modifier, ArgTy, E);
5146
5147 int j = 0;
Michael J. Spencerdd597752014-05-31 00:22:12 +00005148 ConstantInt *C0 = ConstantInt::get(CGF.SizeTy, 0);
Tim Northovera2ee4332014-03-29 15:09:45 +00005149 for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
5150 ai != ae; ++ai, ++j) {
5151 llvm::Type *ArgTy = ai->getType();
5152 if (Ops[j]->getType()->getPrimitiveSizeInBits() ==
5153 ArgTy->getPrimitiveSizeInBits())
5154 continue;
5155
5156 assert(ArgTy->isVectorTy() && !Ops[j]->getType()->isVectorTy());
5157 // The constant argument to an _n_ intrinsic always has Int32Ty, so truncate
5158 // it before inserting.
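    // Illustrative example: an i32 immediate destined for an i16 lane is
    // truncated to i16 and inserted into lane 0 of an otherwise-undef vector.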
5159 Ops[j] =
5160 CGF.Builder.CreateTruncOrBitCast(Ops[j], ArgTy->getVectorElementType());
5161 Ops[j] =
5162 CGF.Builder.CreateInsertElement(UndefValue::get(ArgTy), Ops[j], C0);
5163 }
5164
5165 Value *Result = CGF.EmitNeonCall(F, Ops, s);
5166 llvm::Type *ResultType = CGF.ConvertType(E->getType());
5167 if (ResultType->getPrimitiveSizeInBits() <
5168 Result->getType()->getPrimitiveSizeInBits())
5169 return CGF.Builder.CreateExtractElement(Result, C0);
5170
5171 return CGF.Builder.CreateBitCast(Result, ResultType, s);
5172}
Tim Northover8fe03d62014-02-21 11:57:24 +00005173
Tim Northover8fe03d62014-02-21 11:57:24 +00005174Value *CodeGenFunction::EmitCommonNeonBuiltinExpr(
5175 unsigned BuiltinID, unsigned LLVMIntrinsic, unsigned AltLLVMIntrinsic,
5176 const char *NameHint, unsigned Modifier, const CallExpr *E,
Sjoerd Meijer95da8752018-03-13 19:38:56 +00005177 SmallVectorImpl<llvm::Value *> &Ops, Address PtrOp0, Address PtrOp1,
5178 llvm::Triple::ArchType Arch) {
Tim Northover8fe03d62014-02-21 11:57:24 +00005179 // Get the last argument, which specifies the vector type.
5180 llvm::APSInt NeonTypeConst;
5181 const Expr *Arg = E->getArg(E->getNumArgs() - 1);
5182 if (!Arg->isIntegerConstantExpr(NeonTypeConst, getContext()))
Craig Topper8a13c412014-05-21 05:09:00 +00005183 return nullptr;
Tim Northover8fe03d62014-02-21 11:57:24 +00005184
5185 // Determine the type of this overloaded NEON intrinsic.
5186 NeonTypeFlags Type(NeonTypeConst.getZExtValue());
5187 bool Usgn = Type.isUnsigned();
5188 bool Quad = Type.isQuad();
Sjoerd Meijer87793e72018-03-19 13:22:49 +00005189 const bool HasLegalHalfType = getTarget().hasLegalHalfType();
Tim Northover8fe03d62014-02-21 11:57:24 +00005190
Sjoerd Meijer87793e72018-03-19 13:22:49 +00005191 llvm::VectorType *VTy = GetNeonType(this, Type, HasLegalHalfType);
Tim Northover8fe03d62014-02-21 11:57:24 +00005192 llvm::Type *Ty = VTy;
5193 if (!Ty)
Craig Topper8a13c412014-05-21 05:09:00 +00005194 return nullptr;
Tim Northover8fe03d62014-02-21 11:57:24 +00005195
John McCall7f416cc2015-09-08 08:05:57 +00005196 auto getAlignmentValue32 = [&](Address addr) -> Value* {
5197 return Builder.getInt32(addr.getAlignment().getQuantity());
5198 };
5199
Tim Northover8fe03d62014-02-21 11:57:24 +00005200 unsigned Int = LLVMIntrinsic;
5201 if ((Modifier & UnsignedAlts) && !Usgn)
5202 Int = AltLLVMIntrinsic;
5203
5204 switch (BuiltinID) {
5205 default: break;
Amara Emersonc10b2462019-03-21 22:31:37 +00005206 case NEON::BI__builtin_neon_vpadd_v:
5207 case NEON::BI__builtin_neon_vpaddq_v:
5208 // We don't allow fp/int overloading of intrinsics.
5209 if (VTy->getElementType()->isFloatingPointTy() &&
5210 Int == Intrinsic::aarch64_neon_addp)
5211 Int = Intrinsic::aarch64_neon_faddp;
5212 break;
Tim Northover8fe03d62014-02-21 11:57:24 +00005213 case NEON::BI__builtin_neon_vabs_v:
5214 case NEON::BI__builtin_neon_vabsq_v:
5215 if (VTy->getElementType()->isFloatingPointTy())
5216 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::fabs, Ty), Ops, "vabs");
5217 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Ty), Ops, "vabs");
5218 case NEON::BI__builtin_neon_vaddhn_v: {
5219 llvm::VectorType *SrcTy =
5220 llvm::VectorType::getExtendedElementVectorType(VTy);
5221
5222 // %sum = add <4 x i32> %lhs, %rhs
5223 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy);
5224 Ops[1] = Builder.CreateBitCast(Ops[1], SrcTy);
5225 Ops[0] = Builder.CreateAdd(Ops[0], Ops[1], "vaddhn");
5226
5227 // %high = lshr <4 x i32> %sum, <i32 16, i32 16, i32 16, i32 16>
Benjamin Kramerc385a802015-07-28 15:40:11 +00005228 Constant *ShiftAmt =
5229 ConstantInt::get(SrcTy, SrcTy->getScalarSizeInBits() / 2);
Tim Northover8fe03d62014-02-21 11:57:24 +00005230 Ops[0] = Builder.CreateLShr(Ops[0], ShiftAmt, "vaddhn");
5231
5232 // %res = trunc <4 x i32> %high to <4 x i16>
5233 return Builder.CreateTrunc(Ops[0], VTy, "vaddhn");
5234 }
5235 case NEON::BI__builtin_neon_vcale_v:
5236 case NEON::BI__builtin_neon_vcaleq_v:
5237 case NEON::BI__builtin_neon_vcalt_v:
5238 case NEON::BI__builtin_neon_vcaltq_v:
5239 std::swap(Ops[0], Ops[1]);
Galina Kistanova0872d6c2017-06-03 06:30:46 +00005240 LLVM_FALLTHROUGH;
Tim Northover8fe03d62014-02-21 11:57:24 +00005241 case NEON::BI__builtin_neon_vcage_v:
5242 case NEON::BI__builtin_neon_vcageq_v:
5243 case NEON::BI__builtin_neon_vcagt_v:
5244 case NEON::BI__builtin_neon_vcagtq_v: {
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005245 llvm::Type *Ty;
5246 switch (VTy->getScalarSizeInBits()) {
5247 default: llvm_unreachable("unexpected type");
5248 case 32:
5249 Ty = FloatTy;
5250 break;
5251 case 64:
5252 Ty = DoubleTy;
5253 break;
5254 case 16:
5255 Ty = HalfTy;
5256 break;
5257 }
5258 llvm::Type *VecFlt = llvm::VectorType::get(Ty, VTy->getNumElements());
Tim Northover8fe03d62014-02-21 11:57:24 +00005259 llvm::Type *Tys[] = { VTy, VecFlt };
5260 Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
5261 return EmitNeonCall(F, Ops, NameHint);
5262 }
Abderrazek Zaafranib5ac56f2018-03-23 00:08:40 +00005263 case NEON::BI__builtin_neon_vceqz_v:
5264 case NEON::BI__builtin_neon_vceqzq_v:
5265 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OEQ,
5266 ICmpInst::ICMP_EQ, "vceqz");
5267 case NEON::BI__builtin_neon_vcgez_v:
5268 case NEON::BI__builtin_neon_vcgezq_v:
5269 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OGE,
5270 ICmpInst::ICMP_SGE, "vcgez");
5271 case NEON::BI__builtin_neon_vclez_v:
5272 case NEON::BI__builtin_neon_vclezq_v:
5273 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OLE,
5274 ICmpInst::ICMP_SLE, "vclez");
5275 case NEON::BI__builtin_neon_vcgtz_v:
5276 case NEON::BI__builtin_neon_vcgtzq_v:
5277 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OGT,
5278 ICmpInst::ICMP_SGT, "vcgtz");
5279 case NEON::BI__builtin_neon_vcltz_v:
5280 case NEON::BI__builtin_neon_vcltzq_v:
5281 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OLT,
5282 ICmpInst::ICMP_SLT, "vcltz");
Tim Northover8fe03d62014-02-21 11:57:24 +00005283 case NEON::BI__builtin_neon_vclz_v:
5284 case NEON::BI__builtin_neon_vclzq_v:
 5285 // We generate a target-independent intrinsic, which needs a second argument
 5286 // indicating whether or not clz of zero is undefined; on ARM it isn't.
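    // Illustrative example: on ARM a v2i32 vclz becomes
    //   llvm.ctlz.v2i32(%x, i1 false)
    // because CLZ of zero is well defined (it returns the bit width).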
5287 Ops.push_back(Builder.getInt1(getTarget().isCLZForZeroUndef()));
5288 break;
5289 case NEON::BI__builtin_neon_vcvt_f32_v:
5290 case NEON::BI__builtin_neon_vcvtq_f32_v:
5291 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
Sjoerd Meijer87793e72018-03-19 13:22:49 +00005292 Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, Quad),
5293 HasLegalHalfType);
Tim Northover8fe03d62014-02-21 11:57:24 +00005294 return Usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
5295 : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005296 case NEON::BI__builtin_neon_vcvt_f16_v:
5297 case NEON::BI__builtin_neon_vcvtq_f16_v:
5298 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
Sjoerd Meijer87793e72018-03-19 13:22:49 +00005299 Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float16, false, Quad),
5300 HasLegalHalfType);
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005301 return Usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
5302 : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
5303 case NEON::BI__builtin_neon_vcvt_n_f16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005304 case NEON::BI__builtin_neon_vcvt_n_f32_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00005305 case NEON::BI__builtin_neon_vcvt_n_f64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005306 case NEON::BI__builtin_neon_vcvtq_n_f16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00005307 case NEON::BI__builtin_neon_vcvtq_n_f32_v:
5308 case NEON::BI__builtin_neon_vcvtq_n_f64_v: {
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00005309 llvm::Type *Tys[2] = { GetFloatNeonType(this, Type), Ty };
Tim Northover8fe03d62014-02-21 11:57:24 +00005310 Int = Usgn ? LLVMIntrinsic : AltLLVMIntrinsic;
5311 Function *F = CGM.getIntrinsic(Int, Tys);
5312 return EmitNeonCall(F, Ops, "vcvt_n");
5313 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005314 case NEON::BI__builtin_neon_vcvt_n_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005315 case NEON::BI__builtin_neon_vcvt_n_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005316 case NEON::BI__builtin_neon_vcvt_n_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005317 case NEON::BI__builtin_neon_vcvt_n_u32_v:
5318 case NEON::BI__builtin_neon_vcvt_n_s64_v:
5319 case NEON::BI__builtin_neon_vcvt_n_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005320 case NEON::BI__builtin_neon_vcvtq_n_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005321 case NEON::BI__builtin_neon_vcvtq_n_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005322 case NEON::BI__builtin_neon_vcvtq_n_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005323 case NEON::BI__builtin_neon_vcvtq_n_u32_v:
5324 case NEON::BI__builtin_neon_vcvtq_n_s64_v:
5325 case NEON::BI__builtin_neon_vcvtq_n_u64_v: {
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00005326 llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
Tim Northover8fe03d62014-02-21 11:57:24 +00005327 Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
5328 return EmitNeonCall(F, Ops, "vcvt_n");
5329 }
5330 case NEON::BI__builtin_neon_vcvt_s32_v:
5331 case NEON::BI__builtin_neon_vcvt_u32_v:
5332 case NEON::BI__builtin_neon_vcvt_s64_v:
5333 case NEON::BI__builtin_neon_vcvt_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005334 case NEON::BI__builtin_neon_vcvt_s16_v:
5335 case NEON::BI__builtin_neon_vcvt_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005336 case NEON::BI__builtin_neon_vcvtq_s32_v:
5337 case NEON::BI__builtin_neon_vcvtq_u32_v:
5338 case NEON::BI__builtin_neon_vcvtq_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005339 case NEON::BI__builtin_neon_vcvtq_u64_v:
5340 case NEON::BI__builtin_neon_vcvtq_s16_v:
5341 case NEON::BI__builtin_neon_vcvtq_u16_v: {
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00005342 Ops[0] = Builder.CreateBitCast(Ops[0], GetFloatNeonType(this, Type));
Tim Northover8fe03d62014-02-21 11:57:24 +00005343 return Usgn ? Builder.CreateFPToUI(Ops[0], Ty, "vcvt")
5344 : Builder.CreateFPToSI(Ops[0], Ty, "vcvt");
5345 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005346 case NEON::BI__builtin_neon_vcvta_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005347 case NEON::BI__builtin_neon_vcvta_s32_v:
5348 case NEON::BI__builtin_neon_vcvta_s64_v:
Luke Geesonda2b2e82018-06-15 10:10:45 +00005349 case NEON::BI__builtin_neon_vcvta_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005350 case NEON::BI__builtin_neon_vcvta_u32_v:
5351 case NEON::BI__builtin_neon_vcvta_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005352 case NEON::BI__builtin_neon_vcvtaq_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005353 case NEON::BI__builtin_neon_vcvtaq_s32_v:
5354 case NEON::BI__builtin_neon_vcvtaq_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005355 case NEON::BI__builtin_neon_vcvtaq_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005356 case NEON::BI__builtin_neon_vcvtaq_u32_v:
5357 case NEON::BI__builtin_neon_vcvtaq_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005358 case NEON::BI__builtin_neon_vcvtn_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005359 case NEON::BI__builtin_neon_vcvtn_s32_v:
5360 case NEON::BI__builtin_neon_vcvtn_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005361 case NEON::BI__builtin_neon_vcvtn_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005362 case NEON::BI__builtin_neon_vcvtn_u32_v:
5363 case NEON::BI__builtin_neon_vcvtn_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005364 case NEON::BI__builtin_neon_vcvtnq_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005365 case NEON::BI__builtin_neon_vcvtnq_s32_v:
5366 case NEON::BI__builtin_neon_vcvtnq_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005367 case NEON::BI__builtin_neon_vcvtnq_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005368 case NEON::BI__builtin_neon_vcvtnq_u32_v:
5369 case NEON::BI__builtin_neon_vcvtnq_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005370 case NEON::BI__builtin_neon_vcvtp_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005371 case NEON::BI__builtin_neon_vcvtp_s32_v:
5372 case NEON::BI__builtin_neon_vcvtp_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005373 case NEON::BI__builtin_neon_vcvtp_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005374 case NEON::BI__builtin_neon_vcvtp_u32_v:
5375 case NEON::BI__builtin_neon_vcvtp_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005376 case NEON::BI__builtin_neon_vcvtpq_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005377 case NEON::BI__builtin_neon_vcvtpq_s32_v:
5378 case NEON::BI__builtin_neon_vcvtpq_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005379 case NEON::BI__builtin_neon_vcvtpq_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005380 case NEON::BI__builtin_neon_vcvtpq_u32_v:
5381 case NEON::BI__builtin_neon_vcvtpq_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005382 case NEON::BI__builtin_neon_vcvtm_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005383 case NEON::BI__builtin_neon_vcvtm_s32_v:
5384 case NEON::BI__builtin_neon_vcvtm_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005385 case NEON::BI__builtin_neon_vcvtm_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005386 case NEON::BI__builtin_neon_vcvtm_u32_v:
5387 case NEON::BI__builtin_neon_vcvtm_u64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005388 case NEON::BI__builtin_neon_vcvtmq_s16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005389 case NEON::BI__builtin_neon_vcvtmq_s32_v:
5390 case NEON::BI__builtin_neon_vcvtmq_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00005391 case NEON::BI__builtin_neon_vcvtmq_u16_v:
Tim Northover8fe03d62014-02-21 11:57:24 +00005392 case NEON::BI__builtin_neon_vcvtmq_u32_v:
5393 case NEON::BI__builtin_neon_vcvtmq_u64_v: {
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00005394 llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
Tim Northover8fe03d62014-02-21 11:57:24 +00005395 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, NameHint);
5396 }
5397 case NEON::BI__builtin_neon_vext_v:
5398 case NEON::BI__builtin_neon_vextq_v: {
5399 int CV = cast<ConstantInt>(Ops[2])->getSExtValue();
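    // Illustrative example: for a <4 x i16> result with CV == 1 the shuffle
    // mask built below is <1, 2, 3, 4>, i.e. the last three lanes of Ops[0]
    // followed by the first lane of Ops[1].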
Craig Topperd1cb4ce2016-06-12 00:41:24 +00005400 SmallVector<uint32_t, 16> Indices;
Tim Northover8fe03d62014-02-21 11:57:24 +00005401 for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
Craig Topper832caf02016-05-29 02:39:30 +00005402 Indices.push_back(i+CV);
Tim Northover8fe03d62014-02-21 11:57:24 +00005403
5404 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
5405 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
Craig Topper832caf02016-05-29 02:39:30 +00005406 return Builder.CreateShuffleVector(Ops[0], Ops[1], Indices, "vext");
Tim Northover8fe03d62014-02-21 11:57:24 +00005407 }
5408 case NEON::BI__builtin_neon_vfma_v:
5409 case NEON::BI__builtin_neon_vfmaq_v: {
James Y Knight8799cae2019-02-03 21:53:49 +00005410 Function *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
Tim Northover8fe03d62014-02-21 11:57:24 +00005411 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
5412 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
5413 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
5414
 5415 // The NEON intrinsic puts the accumulator first, unlike the LLVM fma.
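    // Illustrative example: vfma(a, b, c) computes a + b * c, so the call
    // below is llvm.fma(b, c, a).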
David Blaikie43f9bb72015-05-18 22:14:03 +00005416 return Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0]});
Tim Northover8fe03d62014-02-21 11:57:24 +00005417 }
5418 case NEON::BI__builtin_neon_vld1_v:
Jeroen Ketema55a8e802015-09-30 10:56:56 +00005419 case NEON::BI__builtin_neon_vld1q_v: {
5420 llvm::Type *Tys[] = {Ty, Int8PtrTy};
John McCall7f416cc2015-09-08 08:05:57 +00005421 Ops.push_back(getAlignmentValue32(PtrOp0));
Jeroen Ketema55a8e802015-09-30 10:56:56 +00005422 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, "vld1");
5423 }
Ivan A. Kosarev9c40c0a2018-06-02 17:42:59 +00005424 case NEON::BI__builtin_neon_vld1_x2_v:
5425 case NEON::BI__builtin_neon_vld1q_x2_v:
5426 case NEON::BI__builtin_neon_vld1_x3_v:
5427 case NEON::BI__builtin_neon_vld1q_x3_v:
5428 case NEON::BI__builtin_neon_vld1_x4_v:
5429 case NEON::BI__builtin_neon_vld1q_x4_v: {
5430 llvm::Type *PTy = llvm::PointerType::getUnqual(VTy->getVectorElementType());
5431 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
5432 llvm::Type *Tys[2] = { VTy, PTy };
5433 Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
5434 Ops[1] = Builder.CreateCall(F, Ops[1], "vld1xN");
5435 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
5436 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
5437 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
5438 }
Tim Northover8fe03d62014-02-21 11:57:24 +00005439 case NEON::BI__builtin_neon_vld2_v:
5440 case NEON::BI__builtin_neon_vld2q_v:
5441 case NEON::BI__builtin_neon_vld3_v:
5442 case NEON::BI__builtin_neon_vld3q_v:
5443 case NEON::BI__builtin_neon_vld4_v:
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00005444 case NEON::BI__builtin_neon_vld4q_v:
5445 case NEON::BI__builtin_neon_vld2_dup_v:
5446 case NEON::BI__builtin_neon_vld2q_dup_v:
5447 case NEON::BI__builtin_neon_vld3_dup_v:
5448 case NEON::BI__builtin_neon_vld3q_dup_v:
5449 case NEON::BI__builtin_neon_vld4_dup_v:
5450 case NEON::BI__builtin_neon_vld4q_dup_v: {
Jeroen Ketema55a8e802015-09-30 10:56:56 +00005451 llvm::Type *Tys[] = {Ty, Int8PtrTy};
5452 Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
John McCall7f416cc2015-09-08 08:05:57 +00005453 Value *Align = getAlignmentValue32(PtrOp1);
David Blaikie43f9bb72015-05-18 22:14:03 +00005454 Ops[1] = Builder.CreateCall(F, {Ops[1], Align}, NameHint);
Tim Northover8fe03d62014-02-21 11:57:24 +00005455 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
5456 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
John McCall7f416cc2015-09-08 08:05:57 +00005457 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northover8fe03d62014-02-21 11:57:24 +00005458 }
5459 case NEON::BI__builtin_neon_vld1_dup_v:
5460 case NEON::BI__builtin_neon_vld1q_dup_v: {
5461 Value *V = UndefValue::get(Ty);
5462 Ty = llvm::PointerType::getUnqual(VTy->getElementType());
John McCall7f416cc2015-09-08 08:05:57 +00005463 PtrOp0 = Builder.CreateBitCast(PtrOp0, Ty);
5464 LoadInst *Ld = Builder.CreateLoad(PtrOp0);
Michael J. Spencerdd597752014-05-31 00:22:12 +00005465 llvm::Constant *CI = ConstantInt::get(SizeTy, 0);
Tim Northover8fe03d62014-02-21 11:57:24 +00005466 Ops[0] = Builder.CreateInsertElement(V, Ld, CI);
5467 return EmitNeonSplat(Ops[0], CI);
5468 }
5469 case NEON::BI__builtin_neon_vld2_lane_v:
5470 case NEON::BI__builtin_neon_vld2q_lane_v:
5471 case NEON::BI__builtin_neon_vld3_lane_v:
5472 case NEON::BI__builtin_neon_vld3q_lane_v:
5473 case NEON::BI__builtin_neon_vld4_lane_v:
5474 case NEON::BI__builtin_neon_vld4q_lane_v: {
Jeroen Ketema55a8e802015-09-30 10:56:56 +00005475 llvm::Type *Tys[] = {Ty, Int8PtrTy};
5476 Function *F = CGM.getIntrinsic(LLVMIntrinsic, Tys);
Tim Northover8fe03d62014-02-21 11:57:24 +00005477 for (unsigned I = 2; I < Ops.size() - 1; ++I)
5478 Ops[I] = Builder.CreateBitCast(Ops[I], Ty);
John McCall7f416cc2015-09-08 08:05:57 +00005479 Ops.push_back(getAlignmentValue32(PtrOp1));
Tim Northover8fe03d62014-02-21 11:57:24 +00005480 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), NameHint);
5481 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
5482 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
John McCall7f416cc2015-09-08 08:05:57 +00005483 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northover8fe03d62014-02-21 11:57:24 +00005484 }
5485 case NEON::BI__builtin_neon_vmovl_v: {
5486 llvm::Type *DTy =llvm::VectorType::getTruncatedElementVectorType(VTy);
5487 Ops[0] = Builder.CreateBitCast(Ops[0], DTy);
5488 if (Usgn)
5489 return Builder.CreateZExt(Ops[0], Ty, "vmovl");
5490 return Builder.CreateSExt(Ops[0], Ty, "vmovl");
5491 }
5492 case NEON::BI__builtin_neon_vmovn_v: {
5493 llvm::Type *QTy = llvm::VectorType::getExtendedElementVectorType(VTy);
5494 Ops[0] = Builder.CreateBitCast(Ops[0], QTy);
5495 return Builder.CreateTrunc(Ops[0], Ty, "vmovn");
5496 }
5497 case NEON::BI__builtin_neon_vmull_v:
5498 // FIXME: the integer vmull operations could be emitted in terms of pure
5499 // LLVM IR (2 exts followed by a mul). Unfortunately LLVM has a habit of
5500 // hoisting the exts outside loops. Until global ISel comes along that can
 5501 // see through such movement, this leads to bad CodeGen. So we need an
5502 // intrinsic for now.
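    // The pure-IR form would be roughly (illustrative):
    //   %lhs = sext <4 x i16> %a to <4 x i32>
    //   %rhs = sext <4 x i16> %b to <4 x i32>
    //   %res = mul <4 x i32> %lhs, %rhs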
5503 Int = Usgn ? Intrinsic::arm_neon_vmullu : Intrinsic::arm_neon_vmulls;
5504 Int = Type.isPoly() ? (unsigned)Intrinsic::arm_neon_vmullp : Int;
5505 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
5506 case NEON::BI__builtin_neon_vpadal_v:
5507 case NEON::BI__builtin_neon_vpadalq_v: {
5508 // The source operand type has twice as many elements of half the size.
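    // Illustrative example: for a <4 x i16> accumulator the narrow source
    // type computed below is <8 x i8>.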
5509 unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
5510 llvm::Type *EltTy =
5511 llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
5512 llvm::Type *NarrowTy =
5513 llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
5514 llvm::Type *Tys[2] = { Ty, NarrowTy };
5515 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, NameHint);
5516 }
5517 case NEON::BI__builtin_neon_vpaddl_v:
5518 case NEON::BI__builtin_neon_vpaddlq_v: {
5519 // The source operand type has twice as many elements of half the size.
5520 unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
5521 llvm::Type *EltTy = llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
5522 llvm::Type *NarrowTy =
5523 llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
5524 llvm::Type *Tys[2] = { Ty, NarrowTy };
5525 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpaddl");
5526 }
5527 case NEON::BI__builtin_neon_vqdmlal_v:
5528 case NEON::BI__builtin_neon_vqdmlsl_v: {
5529 SmallVector<Value *, 2> MulOps(Ops.begin() + 1, Ops.end());
Benjamin Kramerc385a802015-07-28 15:40:11 +00005530 Ops[1] =
5531 EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Ty), MulOps, "vqdmlal");
5532 Ops.resize(2);
5533 return EmitNeonCall(CGM.getIntrinsic(AltLLVMIntrinsic, Ty), Ops, NameHint);
Tim Northover8fe03d62014-02-21 11:57:24 +00005534 }
5535 case NEON::BI__builtin_neon_vqshl_n_v:
5536 case NEON::BI__builtin_neon_vqshlq_n_v:
5537 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl_n",
5538 1, false);
Yi Kong1083eb52014-07-29 09:25:17 +00005539 case NEON::BI__builtin_neon_vqshlu_n_v:
5540 case NEON::BI__builtin_neon_vqshluq_n_v:
5541 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshlu_n",
5542 1, false);
Tim Northover8fe03d62014-02-21 11:57:24 +00005543 case NEON::BI__builtin_neon_vrecpe_v:
5544 case NEON::BI__builtin_neon_vrecpeq_v:
5545 case NEON::BI__builtin_neon_vrsqrte_v:
5546 case NEON::BI__builtin_neon_vrsqrteq_v:
5547 Int = Ty->isFPOrFPVectorTy() ? LLVMIntrinsic : AltLLVMIntrinsic;
5548 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, NameHint);
Ivan A. Kosarev8264bb82018-07-23 13:26:37 +00005549 case NEON::BI__builtin_neon_vrndi_v:
5550 case NEON::BI__builtin_neon_vrndiq_v:
5551 Int = Intrinsic::nearbyint;
5552 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, NameHint);
Yi Kong1083eb52014-07-29 09:25:17 +00005553 case NEON::BI__builtin_neon_vrshr_n_v:
5554 case NEON::BI__builtin_neon_vrshrq_n_v:
5555 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshr_n",
5556 1, true);
Tim Northover8fe03d62014-02-21 11:57:24 +00005557 case NEON::BI__builtin_neon_vshl_n_v:
5558 case NEON::BI__builtin_neon_vshlq_n_v:
5559 Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
5560 return Builder.CreateShl(Builder.CreateBitCast(Ops[0],Ty), Ops[1],
5561 "vshl_n");
5562 case NEON::BI__builtin_neon_vshll_n_v: {
5563 llvm::Type *SrcTy = llvm::VectorType::getTruncatedElementVectorType(VTy);
5564 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy);
5565 if (Usgn)
5566 Ops[0] = Builder.CreateZExt(Ops[0], VTy);
5567 else
5568 Ops[0] = Builder.CreateSExt(Ops[0], VTy);
5569 Ops[1] = EmitNeonShiftVector(Ops[1], VTy, false);
5570 return Builder.CreateShl(Ops[0], Ops[1], "vshll_n");
5571 }
5572 case NEON::BI__builtin_neon_vshrn_n_v: {
5573 llvm::Type *SrcTy = llvm::VectorType::getExtendedElementVectorType(VTy);
5574 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy);
5575 Ops[1] = EmitNeonShiftVector(Ops[1], SrcTy, false);
5576 if (Usgn)
5577 Ops[0] = Builder.CreateLShr(Ops[0], Ops[1]);
5578 else
5579 Ops[0] = Builder.CreateAShr(Ops[0], Ops[1]);
5580 return Builder.CreateTrunc(Ops[0], Ty, "vshrn_n");
5581 }
5582 case NEON::BI__builtin_neon_vshr_n_v:
5583 case NEON::BI__builtin_neon_vshrq_n_v:
5584 return EmitNeonRShiftImm(Ops[0], Ops[1], Ty, Usgn, "vshr_n");
5585 case NEON::BI__builtin_neon_vst1_v:
5586 case NEON::BI__builtin_neon_vst1q_v:
5587 case NEON::BI__builtin_neon_vst2_v:
5588 case NEON::BI__builtin_neon_vst2q_v:
5589 case NEON::BI__builtin_neon_vst3_v:
5590 case NEON::BI__builtin_neon_vst3q_v:
5591 case NEON::BI__builtin_neon_vst4_v:
5592 case NEON::BI__builtin_neon_vst4q_v:
5593 case NEON::BI__builtin_neon_vst2_lane_v:
5594 case NEON::BI__builtin_neon_vst2q_lane_v:
5595 case NEON::BI__builtin_neon_vst3_lane_v:
5596 case NEON::BI__builtin_neon_vst3q_lane_v:
5597 case NEON::BI__builtin_neon_vst4_lane_v:
Jeroen Ketema55a8e802015-09-30 10:56:56 +00005598 case NEON::BI__builtin_neon_vst4q_lane_v: {
5599 llvm::Type *Tys[] = {Int8PtrTy, Ty};
John McCall7f416cc2015-09-08 08:05:57 +00005600 Ops.push_back(getAlignmentValue32(PtrOp0));
Jeroen Ketema55a8e802015-09-30 10:56:56 +00005601 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "");
5602 }
Ivan A. Kosarev73c76c32018-06-10 09:28:10 +00005603 case NEON::BI__builtin_neon_vst1_x2_v:
5604 case NEON::BI__builtin_neon_vst1q_x2_v:
5605 case NEON::BI__builtin_neon_vst1_x3_v:
5606 case NEON::BI__builtin_neon_vst1q_x3_v:
5607 case NEON::BI__builtin_neon_vst1_x4_v:
5608 case NEON::BI__builtin_neon_vst1q_x4_v: {
5609 llvm::Type *PTy = llvm::PointerType::getUnqual(VTy->getVectorElementType());
 5610 // TODO: Currently in AArch32 mode the pointer operand comes first, whereas
 5611 // in AArch64 it comes last. We may want to stick to one or the other.
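    // Roughly (illustrative), for the _x2 form: with Ops == {ptr, v0, v1},
    // the AArch64 path below rotates the pointer to the end and calls the
    // st1x2-style intrinsic as (v0, v1, ptr), while AArch32 keeps the
    // pointer operand first.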
5612 if (Arch == llvm::Triple::aarch64 || Arch == llvm::Triple::aarch64_be) {
5613 llvm::Type *Tys[2] = { VTy, PTy };
5614 std::rotate(Ops.begin(), Ops.begin() + 1, Ops.end());
5615 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, "");
5616 }
5617 llvm::Type *Tys[2] = { PTy, VTy };
5618 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, "");
5619 }
Tim Northover8fe03d62014-02-21 11:57:24 +00005620 case NEON::BI__builtin_neon_vsubhn_v: {
5621 llvm::VectorType *SrcTy =
5622 llvm::VectorType::getExtendedElementVectorType(VTy);
5623
 5624 // %diff = sub <4 x i32> %lhs, %rhs
5625 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy);
5626 Ops[1] = Builder.CreateBitCast(Ops[1], SrcTy);
5627 Ops[0] = Builder.CreateSub(Ops[0], Ops[1], "vsubhn");
5628
 5629 // %high = lshr <4 x i32> %diff, <i32 16, i32 16, i32 16, i32 16>
Benjamin Kramerc385a802015-07-28 15:40:11 +00005630 Constant *ShiftAmt =
5631 ConstantInt::get(SrcTy, SrcTy->getScalarSizeInBits() / 2);
Tim Northover8fe03d62014-02-21 11:57:24 +00005632 Ops[0] = Builder.CreateLShr(Ops[0], ShiftAmt, "vsubhn");
5633
5634 // %res = trunc <4 x i32> %high to <4 x i16>
5635 return Builder.CreateTrunc(Ops[0], VTy, "vsubhn");
5636 }
5637 case NEON::BI__builtin_neon_vtrn_v:
5638 case NEON::BI__builtin_neon_vtrnq_v: {
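    // vtrn transposes lanes: the first result takes the even-numbered lanes
    // of both inputs, the second the odd-numbered lanes. Illustrative
    // example: for <4 x i16> operands the two shuffle masks built below are
    // <0, 4, 2, 6> and <1, 5, 3, 7>.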
5639 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
5640 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
5641 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Craig Topper8a13c412014-05-21 05:09:00 +00005642 Value *SV = nullptr;
Tim Northover8fe03d62014-02-21 11:57:24 +00005643
5644 for (unsigned vi = 0; vi != 2; ++vi) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00005645 SmallVector<uint32_t, 16> Indices;
Tim Northover8fe03d62014-02-21 11:57:24 +00005646 for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
Craig Topper832caf02016-05-29 02:39:30 +00005647 Indices.push_back(i+vi);
5648 Indices.push_back(i+e+vi);
Tim Northover8fe03d62014-02-21 11:57:24 +00005649 }
David Blaikiefb901c7a2015-04-04 15:12:29 +00005650 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
Craig Topper832caf02016-05-29 02:39:30 +00005651 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], Indices, "vtrn");
John McCall7f416cc2015-09-08 08:05:57 +00005652 SV = Builder.CreateDefaultAlignedStore(SV, Addr);
Tim Northover8fe03d62014-02-21 11:57:24 +00005653 }
5654 return SV;
5655 }
5656 case NEON::BI__builtin_neon_vtst_v:
5657 case NEON::BI__builtin_neon_vtstq_v: {
5658 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
5659 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
5660 Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
5661 Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
5662 ConstantAggregateZero::get(Ty));
5663 return Builder.CreateSExt(Ops[0], Ty, "vtst");
5664 }
5665 case NEON::BI__builtin_neon_vuzp_v:
5666 case NEON::BI__builtin_neon_vuzpq_v: {
5667 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
5668 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
5669 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Craig Topper8a13c412014-05-21 05:09:00 +00005670 Value *SV = nullptr;
Tim Northover8fe03d62014-02-21 11:57:24 +00005671
5672 for (unsigned vi = 0; vi != 2; ++vi) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00005673 SmallVector<uint32_t, 16> Indices;
Tim Northover8fe03d62014-02-21 11:57:24 +00005674 for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
Craig Topper832caf02016-05-29 02:39:30 +00005675 Indices.push_back(2*i+vi);
Tim Northover8fe03d62014-02-21 11:57:24 +00005676
David Blaikiefb901c7a2015-04-04 15:12:29 +00005677 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
Craig Topper832caf02016-05-29 02:39:30 +00005678 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], Indices, "vuzp");
John McCall7f416cc2015-09-08 08:05:57 +00005679 SV = Builder.CreateDefaultAlignedStore(SV, Addr);
Tim Northover8fe03d62014-02-21 11:57:24 +00005680 }
5681 return SV;
5682 }
5683 case NEON::BI__builtin_neon_vzip_v:
5684 case NEON::BI__builtin_neon_vzipq_v: {
5685 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
5686 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
5687 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Craig Topper8a13c412014-05-21 05:09:00 +00005688 Value *SV = nullptr;
Tim Northover8fe03d62014-02-21 11:57:24 +00005689
5690 for (unsigned vi = 0; vi != 2; ++vi) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00005691 SmallVector<uint32_t, 16> Indices;
Tim Northover8fe03d62014-02-21 11:57:24 +00005692 for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
Craig Topper832caf02016-05-29 02:39:30 +00005693 Indices.push_back((i + vi*e) >> 1);
5694 Indices.push_back(((i + vi*e) >> 1)+e);
Tim Northover8fe03d62014-02-21 11:57:24 +00005695 }
David Blaikiefb901c7a2015-04-04 15:12:29 +00005696 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
Craig Topper832caf02016-05-29 02:39:30 +00005697 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], Indices, "vzip");
John McCall7f416cc2015-09-08 08:05:57 +00005698 SV = Builder.CreateDefaultAlignedStore(SV, Addr);
Tim Northover8fe03d62014-02-21 11:57:24 +00005699 }
5700 return SV;
5701 }
Oliver Stannard2fcee8b2018-04-27 14:03:32 +00005702 case NEON::BI__builtin_neon_vdot_v:
5703 case NEON::BI__builtin_neon_vdotq_v: {
5704 llvm::Type *InputTy =
5705 llvm::VectorType::get(Int8Ty, Ty->getPrimitiveSizeInBits() / 8);
5706 llvm::Type *Tys[2] = { Ty, InputTy };
5707 Int = Usgn ? LLVMIntrinsic : AltLLVMIntrinsic;
5708 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vdot");
5709 }
Bryan Chan223307b2018-10-25 23:47:00 +00005710 case NEON::BI__builtin_neon_vfmlal_low_v:
5711 case NEON::BI__builtin_neon_vfmlalq_low_v: {
5712 llvm::Type *InputTy =
5713 llvm::VectorType::get(HalfTy, Ty->getPrimitiveSizeInBits() / 16);
5714 llvm::Type *Tys[2] = { Ty, InputTy };
5715 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vfmlal_low");
5716 }
5717 case NEON::BI__builtin_neon_vfmlsl_low_v:
5718 case NEON::BI__builtin_neon_vfmlslq_low_v: {
5719 llvm::Type *InputTy =
5720 llvm::VectorType::get(HalfTy, Ty->getPrimitiveSizeInBits() / 16);
5721 llvm::Type *Tys[2] = { Ty, InputTy };
5722 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vfmlsl_low");
5723 }
5724 case NEON::BI__builtin_neon_vfmlal_high_v:
5725 case NEON::BI__builtin_neon_vfmlalq_high_v: {
5726 llvm::Type *InputTy =
5727 llvm::VectorType::get(HalfTy, Ty->getPrimitiveSizeInBits() / 16);
5728 llvm::Type *Tys[2] = { Ty, InputTy };
5729 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vfmlal_high");
5730 }
5731 case NEON::BI__builtin_neon_vfmlsl_high_v:
5732 case NEON::BI__builtin_neon_vfmlslq_high_v: {
5733 llvm::Type *InputTy =
5734 llvm::VectorType::get(HalfTy, Ty->getPrimitiveSizeInBits() / 16);
5735 llvm::Type *Tys[2] = { Ty, InputTy };
5736 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vfmlsl_high");
5737 }
Tim Northover8fe03d62014-02-21 11:57:24 +00005738 }
5739
5740 assert(Int && "Expected valid intrinsic number");
5741
5742 // Determine the type(s) of this overloaded AArch64 intrinsic.
5743 Function *F = LookupNeonLLVMIntrinsic(Int, Modifier, Ty, E);
5744
5745 Value *Result = EmitNeonCall(F, Ops, NameHint);
5746 llvm::Type *ResultType = ConvertType(E->getType());
 5747 // AArch64 intrinsics that return a one-element vector are cast back to the
 5748 // scalar type expected by the builtin.
5749 return Builder.CreateBitCast(Result, ResultType, NameHint);
5750}
5751
Kevin Qin1718af62013-11-14 02:45:18 +00005752Value *CodeGenFunction::EmitAArch64CompareBuiltinExpr(
5753 Value *Op, llvm::Type *Ty, const CmpInst::Predicate Fp,
5754 const CmpInst::Predicate Ip, const Twine &Name) {
Tim Northovera2ee4332014-03-29 15:09:45 +00005755 llvm::Type *OTy = Op->getType();
5756
5757 // FIXME: this is utterly horrific. We should not be looking at previous
5758 // codegen context to find out what needs doing. Unfortunately TableGen
5759 // currently gives us exactly the same calls for vceqz_f32 and vceqz_s32
5760 // (etc).
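  // Illustrative example: vceqz_f32 reaches here with Fp == FCMP_OEQ and
  // Ip == ICMP_EQ; the (possibly bitcast) operand type selects which compare
  // is emitted, and the i1 result is sign-extended to an all-ones/all-zeros
  // lane mask.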
5761 if (BitCastInst *BI = dyn_cast<BitCastInst>(Op))
5762 OTy = BI->getOperand(0)->getType();
5763
Kevin Qin1718af62013-11-14 02:45:18 +00005764 Op = Builder.CreateBitCast(Op, OTy);
Tim Northovera2ee4332014-03-29 15:09:45 +00005765 if (OTy->getScalarType()->isFloatingPointTy()) {
5766 Op = Builder.CreateFCmp(Fp, Op, Constant::getNullValue(OTy));
Kevin Qin1718af62013-11-14 02:45:18 +00005767 } else {
Tim Northovera2ee4332014-03-29 15:09:45 +00005768 Op = Builder.CreateICmp(Ip, Op, Constant::getNullValue(OTy));
Kevin Qin1718af62013-11-14 02:45:18 +00005769 }
Hao Liuf96fd372013-12-23 02:44:00 +00005770 return Builder.CreateSExt(Op, Ty, Name);
Kevin Qin1718af62013-11-14 02:45:18 +00005771}
5772
Jiangning Liu18b707c2013-11-14 01:57:55 +00005773static Value *packTBLDVectorList(CodeGenFunction &CGF, ArrayRef<Value *> Ops,
5774 Value *ExtOp, Value *IndexOp,
5775 llvm::Type *ResTy, unsigned IntID,
5776 const char *Name) {
5777 SmallVector<Value *, 2> TblOps;
Simon Pilgrim532de1c2016-06-13 10:05:19 +00005778 if (ExtOp)
5779 TblOps.push_back(ExtOp);
5780
 5781 // Build a vector containing sequential numbers like (0, 1, 2, ..., 15).
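  // Illustrative example: for <8 x i8> inputs the mask below is
  // <0, 1, ..., 15>, concatenating each pair of 64-bit table vectors into a
  // single 128-bit table.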
5782 SmallVector<uint32_t, 16> Indices;
5783 llvm::VectorType *TblTy = cast<llvm::VectorType>(Ops[0]->getType());
5784 for (unsigned i = 0, e = TblTy->getNumElements(); i != e; ++i) {
Craig Topper832caf02016-05-29 02:39:30 +00005785 Indices.push_back(2*i);
5786 Indices.push_back(2*i+1);
Jiangning Liu18b707c2013-11-14 01:57:55 +00005787 }
Jiangning Liu18b707c2013-11-14 01:57:55 +00005788
5789 int PairPos = 0, End = Ops.size() - 1;
5790 while (PairPos < End) {
5791 TblOps.push_back(CGF.Builder.CreateShuffleVector(Ops[PairPos],
Craig Topper832caf02016-05-29 02:39:30 +00005792 Ops[PairPos+1], Indices,
5793 Name));
Jiangning Liu18b707c2013-11-14 01:57:55 +00005794 PairPos += 2;
5795 }
5796
 5797 // If there's an odd number of 64-bit lookup table vectors, fill the high
 5798 // 64 bits of the final 128-bit lookup table with zero.
5799 if (PairPos == End) {
5800 Value *ZeroTbl = ConstantAggregateZero::get(TblTy);
5801 TblOps.push_back(CGF.Builder.CreateShuffleVector(Ops[PairPos],
Craig Topper832caf02016-05-29 02:39:30 +00005802 ZeroTbl, Indices, Name));
Jiangning Liu18b707c2013-11-14 01:57:55 +00005803 }
5804
Simon Pilgrim532de1c2016-06-13 10:05:19 +00005805 Function *TblF;
5806 TblOps.push_back(IndexOp);
5807 TblF = CGF.CGM.getIntrinsic(IntID, ResTy);
5808
5809 return CGF.EmitNeonCall(TblF, TblOps, Name);
5810}
5811
Saleem Abdulrasoola14ac3f42014-12-04 04:52:37 +00005812Value *CodeGenFunction::GetValueForARMHint(unsigned BuiltinID) {
Benjamin Kramerc385a802015-07-28 15:40:11 +00005813 unsigned Value;
Saleem Abdulrasool956c2ec2014-05-04 02:52:25 +00005814 switch (BuiltinID) {
Saleem Abdulrasoola14ac3f42014-12-04 04:52:37 +00005815 default:
5816 return nullptr;
Yi Kong4d5e23f2014-07-14 15:20:09 +00005817 case ARM::BI__builtin_arm_nop:
Benjamin Kramerc385a802015-07-28 15:40:11 +00005818 Value = 0;
5819 break;
Saleem Abdulrasoolece72172014-07-03 02:43:20 +00005820 case ARM::BI__builtin_arm_yield:
Saleem Abdulrasool956c2ec2014-05-04 02:52:25 +00005821 case ARM::BI__yield:
Benjamin Kramerc385a802015-07-28 15:40:11 +00005822 Value = 1;
5823 break;
Saleem Abdulrasoolece72172014-07-03 02:43:20 +00005824 case ARM::BI__builtin_arm_wfe:
Saleem Abdulrasool956c2ec2014-05-04 02:52:25 +00005825 case ARM::BI__wfe:
Benjamin Kramerc385a802015-07-28 15:40:11 +00005826 Value = 2;
5827 break;
Saleem Abdulrasoolece72172014-07-03 02:43:20 +00005828 case ARM::BI__builtin_arm_wfi:
Saleem Abdulrasool956c2ec2014-05-04 02:52:25 +00005829 case ARM::BI__wfi:
Benjamin Kramerc385a802015-07-28 15:40:11 +00005830 Value = 3;
5831 break;
Saleem Abdulrasoolece72172014-07-03 02:43:20 +00005832 case ARM::BI__builtin_arm_sev:
Saleem Abdulrasool956c2ec2014-05-04 02:52:25 +00005833 case ARM::BI__sev:
Benjamin Kramerc385a802015-07-28 15:40:11 +00005834 Value = 4;
5835 break;
Saleem Abdulrasoolece72172014-07-03 02:43:20 +00005836 case ARM::BI__builtin_arm_sevl:
Saleem Abdulrasool956c2ec2014-05-04 02:52:25 +00005837 case ARM::BI__sevl:
Benjamin Kramerc385a802015-07-28 15:40:11 +00005838 Value = 5;
5839 break;
Saleem Abdulrasoolb9f07e32014-04-25 21:13:29 +00005840 }
Benjamin Kramerc385a802015-07-28 15:40:11 +00005841
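  // Illustrative example: __builtin_arm_wfi() lowers to
  //   call void @llvm.arm.hint(i32 3)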
5842 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_hint),
5843 llvm::ConstantInt::get(Int32Ty, Value));
Saleem Abdulrasoola14ac3f42014-12-04 04:52:37 +00005844}
Saleem Abdulrasoolb9f07e32014-04-25 21:13:29 +00005845
Luke Cheeseman59b2d832015-06-15 17:51:01 +00005846 // Generates the IR for the read/write special register builtin.
 5847 // ValueType is the type of the value that is to be written or read;
 5848 // RegisterType is the type of the register being written to or read from.
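// Illustrative example: a 32-bit __builtin_arm_rsr read lowers to a call to
// llvm.read_register.i32 with the register name attached as metadata, and
// __builtin_arm_wsr similarly uses llvm.write_register.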
5849static Value *EmitSpecialRegisterBuiltin(CodeGenFunction &CGF,
5850 const CallExpr *E,
5851 llvm::Type *RegisterType,
Matt Arsenault64665bc2016-06-28 00:13:17 +00005852 llvm::Type *ValueType,
5853 bool IsRead,
5854 StringRef SysReg = "") {
Luke Cheeseman59b2d832015-06-15 17:51:01 +00005855 // The read and write register intrinsics only support 32- and 64-bit operations.
5856 assert((RegisterType->isIntegerTy(32) || RegisterType->isIntegerTy(64))
5857 && "Unsupported size for register.");
5858
5859 CodeGen::CGBuilderTy &Builder = CGF.Builder;
5860 CodeGen::CodeGenModule &CGM = CGF.CGM;
5861 LLVMContext &Context = CGM.getLLVMContext();
5862
Matt Arsenault64665bc2016-06-28 00:13:17 +00005863 if (SysReg.empty()) {
5864 const Expr *SysRegStrExpr = E->getArg(0)->IgnoreParenCasts();
Zachary Turner26dab122016-12-13 17:10:16 +00005865 SysReg = cast<clang::StringLiteral>(SysRegStrExpr)->getString();
Matt Arsenault64665bc2016-06-28 00:13:17 +00005866 }
Luke Cheeseman59b2d832015-06-15 17:51:01 +00005867
5868 llvm::Metadata *Ops[] = { llvm::MDString::get(Context, SysReg) };
5869 llvm::MDNode *RegName = llvm::MDNode::get(Context, Ops);
5870 llvm::Value *Metadata = llvm::MetadataAsValue::get(Context, RegName);
5871
5872 llvm::Type *Types[] = { RegisterType };
5873
5874 bool MixedTypes = RegisterType->isIntegerTy(64) && ValueType->isIntegerTy(32);
5875 assert(!(RegisterType->isIntegerTy(32) && ValueType->isIntegerTy(64))
5876 && "Can't fit 64-bit value in 32-bit register");
5877
5878 if (IsRead) {
James Y Knight8799cae2019-02-03 21:53:49 +00005879 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::read_register, Types);
Luke Cheeseman59b2d832015-06-15 17:51:01 +00005880 llvm::Value *Call = Builder.CreateCall(F, Metadata);
5881
5882 if (MixedTypes)
5883 // Read into 64 bit register and then truncate result to 32 bit.
5884 return Builder.CreateTrunc(Call, ValueType);
5885
5886 if (ValueType->isPointerTy())
5887 // Have i32/i64 result (Call) but want to return a VoidPtrTy (i8*).
5888 return Builder.CreateIntToPtr(Call, ValueType);
5889
5890 return Call;
5891 }
5892
James Y Knight8799cae2019-02-03 21:53:49 +00005893 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::write_register, Types);
Luke Cheeseman59b2d832015-06-15 17:51:01 +00005894 llvm::Value *ArgValue = CGF.EmitScalarExpr(E->getArg(1));
5895 if (MixedTypes) {
5896 // Extend 32 bit write value to 64 bit to pass to write.
5897 ArgValue = Builder.CreateZExt(ArgValue, RegisterType);
5898 return Builder.CreateCall(F, { Metadata, ArgValue });
5899 }
5900
5901 if (ValueType->isPointerTy()) {
5902 // Have VoidPtrTy ArgValue but want to return an i32/i64.
5903 ArgValue = Builder.CreatePtrToInt(ArgValue, RegisterType);
5904 return Builder.CreateCall(F, { Metadata, ArgValue });
5905 }
5906
5907 return Builder.CreateCall(F, { Metadata, ArgValue });
5908}
5909
Bob Wilson63c93142015-06-24 06:05:20 +00005910/// Return true if BuiltinID is an overloaded Neon intrinsic with an extra
5911/// argument that specifies the vector type.
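/// For example (illustrative), vget_lane_i32 takes just (vector, lane) and so
/// has no trailing type-discriminator argument, unlike most overloaded NEON
/// builtins.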
5912static bool HasExtraNeonArgument(unsigned BuiltinID) {
5913 switch (BuiltinID) {
5914 default: break;
5915 case NEON::BI__builtin_neon_vget_lane_i8:
5916 case NEON::BI__builtin_neon_vget_lane_i16:
5917 case NEON::BI__builtin_neon_vget_lane_i32:
5918 case NEON::BI__builtin_neon_vget_lane_i64:
5919 case NEON::BI__builtin_neon_vget_lane_f32:
5920 case NEON::BI__builtin_neon_vgetq_lane_i8:
5921 case NEON::BI__builtin_neon_vgetq_lane_i16:
5922 case NEON::BI__builtin_neon_vgetq_lane_i32:
5923 case NEON::BI__builtin_neon_vgetq_lane_i64:
5924 case NEON::BI__builtin_neon_vgetq_lane_f32:
5925 case NEON::BI__builtin_neon_vset_lane_i8:
5926 case NEON::BI__builtin_neon_vset_lane_i16:
5927 case NEON::BI__builtin_neon_vset_lane_i32:
5928 case NEON::BI__builtin_neon_vset_lane_i64:
5929 case NEON::BI__builtin_neon_vset_lane_f32:
5930 case NEON::BI__builtin_neon_vsetq_lane_i8:
5931 case NEON::BI__builtin_neon_vsetq_lane_i16:
5932 case NEON::BI__builtin_neon_vsetq_lane_i32:
5933 case NEON::BI__builtin_neon_vsetq_lane_i64:
5934 case NEON::BI__builtin_neon_vsetq_lane_f32:
5935 case NEON::BI__builtin_neon_vsha1h_u32:
5936 case NEON::BI__builtin_neon_vsha1cq_u32:
5937 case NEON::BI__builtin_neon_vsha1pq_u32:
5938 case NEON::BI__builtin_neon_vsha1mq_u32:
Erich Keane82025212017-11-15 00:11:24 +00005939 case clang::ARM::BI_MoveToCoprocessor:
5940 case clang::ARM::BI_MoveToCoprocessor2:
Bob Wilson63c93142015-06-24 06:05:20 +00005941 return false;
5942 }
5943 return true;
5944}
5945
Saleem Abdulrasoola14ac3f42014-12-04 04:52:37 +00005946Value *CodeGenFunction::EmitARMBuiltinExpr(unsigned BuiltinID,
Sjoerd Meijer95da8752018-03-13 19:38:56 +00005947 const CallExpr *E,
5948 llvm::Triple::ArchType Arch) {
Saleem Abdulrasoola14ac3f42014-12-04 04:52:37 +00005949 if (auto Hint = GetValueForARMHint(BuiltinID))
5950 return Hint;
Saleem Abdulrasool38ed6de2014-05-02 06:53:57 +00005951
Saleem Abdulrasool86b881c2014-12-17 17:52:30 +00005952 if (BuiltinID == ARM::BI__emit) {
5953 bool IsThumb = getTarget().getTriple().getArch() == llvm::Triple::thumb;
5954 llvm::FunctionType *FTy =
5955 llvm::FunctionType::get(VoidTy, /*Variadic=*/false);
5956
Fangrui Song407659a2018-11-30 23:41:18 +00005957 Expr::EvalResult Result;
5958 if (!E->getArg(0)->EvaluateAsInt(Result, CGM.getContext()))
Saleem Abdulrasool86b881c2014-12-17 17:52:30 +00005959 llvm_unreachable("Sema will ensure that the parameter is constant");
5960
Fangrui Song407659a2018-11-30 23:41:18 +00005961 llvm::APSInt Value = Result.Val.getInt();
Saleem Abdulrasool86b881c2014-12-17 17:52:30 +00005962 uint64_t ZExtValue = Value.zextOrTrunc(IsThumb ? 16 : 32).getZExtValue();
5963
5964 llvm::InlineAsm *Emit =
5965 IsThumb ? InlineAsm::get(FTy, ".inst.n 0x" + utohexstr(ZExtValue), "",
5966 /*SideEffects=*/true)
5967 : InlineAsm::get(FTy, ".inst 0x" + utohexstr(ZExtValue), "",
5968 /*SideEffects=*/true);
5969
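    // Illustrative example: on Thumb, __emit(0xbf00) (the 16-bit NOP
    // encoding) emits the raw instruction word via inline asm of the form
    // ".inst.n 0x...".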
David Blaikie4ba525b2015-07-14 17:27:39 +00005970 return Builder.CreateCall(Emit);
Saleem Abdulrasool86b881c2014-12-17 17:52:30 +00005971 }
5972
Yi Kong1d268af2014-08-26 12:48:06 +00005973 if (BuiltinID == ARM::BI__builtin_arm_dbg) {
5974 Value *Option = EmitScalarExpr(E->getArg(0));
5975 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_dbg), Option);
5976 }
5977
Yi Kong26d104a2014-08-13 19:18:14 +00005978 if (BuiltinID == ARM::BI__builtin_arm_prefetch) {
5979 Value *Address = EmitScalarExpr(E->getArg(0));
5980 Value *RW = EmitScalarExpr(E->getArg(1));
5981 Value *IsData = EmitScalarExpr(E->getArg(2));
5982
 5983 // Locality is not supported on the ARM target.
5984 Value *Locality = llvm::ConstantInt::get(Int32Ty, 3);
5985
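    // Illustrative example: __builtin_arm_prefetch(p, /*rw=*/1, /*data=*/1)
    // becomes llvm.prefetch(p, 1, /*locality=*/3, /*cache type=*/1).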
James Y Knight8799cae2019-02-03 21:53:49 +00005986 Function *F = CGM.getIntrinsic(Intrinsic::prefetch);
David Blaikie43f9bb72015-05-18 22:14:03 +00005987 return Builder.CreateCall(F, {Address, RW, Locality, IsData});
Yi Kong26d104a2014-08-13 19:18:14 +00005988 }
5989
Jim Grosbach171ec342014-06-16 21:55:58 +00005990 if (BuiltinID == ARM::BI__builtin_arm_rbit) {
Chad Rosierc22abb32017-01-10 18:55:11 +00005991 llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
5992 return Builder.CreateCall(
5993 CGM.getIntrinsic(Intrinsic::bitreverse, Arg->getType()), Arg, "rbit");
Jim Grosbach171ec342014-06-16 21:55:58 +00005994 }
5995
Rafael Espindola6bb986d2010-06-09 03:48:40 +00005996 if (BuiltinID == ARM::BI__clear_cache) {
Rafael Espindola2219fc52013-05-14 12:45:47 +00005997 assert(E->getNumArgs() == 2 && "__clear_cache takes 2 arguments");
Rafael Espindolaa54062e2010-06-07 17:26:50 +00005998 const FunctionDecl *FD = E->getDirectCallee();
Benjamin Kramerc385a802015-07-28 15:40:11 +00005999 Value *Ops[2];
Rafael Espindola2219fc52013-05-14 12:45:47 +00006000 for (unsigned i = 0; i < 2; i++)
Benjamin Kramerc385a802015-07-28 15:40:11 +00006001 Ops[i] = EmitScalarExpr(E->getArg(i));
Chris Lattner2192fe52011-07-18 04:24:23 +00006002 llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
6003 llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
Chris Lattner0e62c1c2011-07-23 10:55:15 +00006004 StringRef Name = FD->getName();
John McCall882987f2013-02-28 19:01:20 +00006005 return EmitNounwindRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
Chris Lattner5cc15e02010-03-03 19:03:45 +00006006 }
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006007
Ranjeet Singhca2b3e7b2016-06-17 00:59:41 +00006008 if (BuiltinID == ARM::BI__builtin_arm_mcrr ||
6009 BuiltinID == ARM::BI__builtin_arm_mcrr2) {
6010 Function *F;
6011
6012 switch (BuiltinID) {
6013 default: llvm_unreachable("unexpected builtin");
6014 case ARM::BI__builtin_arm_mcrr:
6015 F = CGM.getIntrinsic(Intrinsic::arm_mcrr);
6016 break;
6017 case ARM::BI__builtin_arm_mcrr2:
6018 F = CGM.getIntrinsic(Intrinsic::arm_mcrr2);
6019 break;
6020 }
6021
 6022 // The MCRR{2} instruction has 5 operands, but
 6023 // the intrinsic has only 4, because Rt and Rt2
 6024 // are represented as a single unsigned 64-bit
 6025 // integer in the intrinsic definition, even
 6026 // though internally they are represented as
 6027 // two 32-bit integers.
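    // Illustrative example: if RtAndRt2 is 0xAAAABBBBCCCCDDDD, Rt below
    // receives the low half 0xCCCCDDDD and Rt2 the high half 0xAAAABBBB.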
6028
6029 Value *Coproc = EmitScalarExpr(E->getArg(0));
6030 Value *Opc1 = EmitScalarExpr(E->getArg(1));
6031 Value *RtAndRt2 = EmitScalarExpr(E->getArg(2));
6032 Value *CRm = EmitScalarExpr(E->getArg(3));
6033
6034 Value *C1 = llvm::ConstantInt::get(Int64Ty, 32);
6035 Value *Rt = Builder.CreateTruncOrBitCast(RtAndRt2, Int32Ty);
6036 Value *Rt2 = Builder.CreateLShr(RtAndRt2, C1);
6037 Rt2 = Builder.CreateTruncOrBitCast(Rt2, Int32Ty);
6038
6039 return Builder.CreateCall(F, {Coproc, Opc1, Rt, Rt2, CRm});
6040 }
6041
6042 if (BuiltinID == ARM::BI__builtin_arm_mrrc ||
6043 BuiltinID == ARM::BI__builtin_arm_mrrc2) {
6044 Function *F;
6045
6046 switch (BuiltinID) {
6047 default: llvm_unreachable("unexpected builtin");
6048 case ARM::BI__builtin_arm_mrrc:
6049 F = CGM.getIntrinsic(Intrinsic::arm_mrrc);
6050 break;
6051 case ARM::BI__builtin_arm_mrrc2:
6052 F = CGM.getIntrinsic(Intrinsic::arm_mrrc2);
6053 break;
6054 }
6055
6056 Value *Coproc = EmitScalarExpr(E->getArg(0));
6057 Value *Opc1 = EmitScalarExpr(E->getArg(1));
6058 Value *CRm = EmitScalarExpr(E->getArg(2));
6059 Value *RtAndRt2 = Builder.CreateCall(F, {Coproc, Opc1, CRm});
6060
6061 // Returns an unsigned 64 bit integer, represented
6062 // as two 32 bit integers.
6063
6064 Value *Rt = Builder.CreateExtractValue(RtAndRt2, 1);
6065 Value *Rt1 = Builder.CreateExtractValue(RtAndRt2, 0);
6066 Rt = Builder.CreateZExt(Rt, Int64Ty);
6067 Rt1 = Builder.CreateZExt(Rt1, Int64Ty);
6068
6069 Value *ShiftCast = llvm::ConstantInt::get(Int64Ty, 32);
6070 RtAndRt2 = Builder.CreateShl(Rt, ShiftCast, "shl", true);
6071 RtAndRt2 = Builder.CreateOr(RtAndRt2, Rt1);
6072
6073 return Builder.CreateBitCast(RtAndRt2, ConvertType(E->getType()));
6074 }
6075
Tim Northover6aacd492013-07-16 09:47:53 +00006076 if (BuiltinID == ARM::BI__builtin_arm_ldrexd ||
Tim Northover3acd6bd2014-07-02 12:56:02 +00006077 ((BuiltinID == ARM::BI__builtin_arm_ldrex ||
6078 BuiltinID == ARM::BI__builtin_arm_ldaex) &&
Saleem Abdulrasoole700cab2014-07-05 20:10:05 +00006079 getContext().getTypeSize(E->getType()) == 64) ||
6080 BuiltinID == ARM::BI__ldrexd) {
6081 Function *F;
6082
6083 switch (BuiltinID) {
6084 default: llvm_unreachable("unexpected builtin");
6085 case ARM::BI__builtin_arm_ldaex:
6086 F = CGM.getIntrinsic(Intrinsic::arm_ldaexd);
6087 break;
6088 case ARM::BI__builtin_arm_ldrexd:
6089 case ARM::BI__builtin_arm_ldrex:
6090 case ARM::BI__ldrexd:
6091 F = CGM.getIntrinsic(Intrinsic::arm_ldrexd);
6092 break;
6093 }
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006094
6095 Value *LdPtr = EmitScalarExpr(E->getArg(0));
Tim Northover6aacd492013-07-16 09:47:53 +00006096 Value *Val = Builder.CreateCall(F, Builder.CreateBitCast(LdPtr, Int8PtrTy),
6097 "ldrexd");
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006098
6099 Value *Val0 = Builder.CreateExtractValue(Val, 1);
6100 Value *Val1 = Builder.CreateExtractValue(Val, 0);
6101 Val0 = Builder.CreateZExt(Val0, Int64Ty);
6102 Val1 = Builder.CreateZExt(Val1, Int64Ty);
6103
6104 Value *ShiftCst = llvm::ConstantInt::get(Int64Ty, 32);
6105 Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
Tim Northover6aacd492013-07-16 09:47:53 +00006106 Val = Builder.CreateOr(Val, Val1);
6107 return Builder.CreateBitCast(Val, ConvertType(E->getType()));
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006108 }
6109
Tim Northover3acd6bd2014-07-02 12:56:02 +00006110 if (BuiltinID == ARM::BI__builtin_arm_ldrex ||
6111 BuiltinID == ARM::BI__builtin_arm_ldaex) {
Tim Northover6aacd492013-07-16 09:47:53 +00006112 Value *LoadAddr = EmitScalarExpr(E->getArg(0));
6113
6114 QualType Ty = E->getType();
6115 llvm::Type *RealResTy = ConvertType(Ty);
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006116 llvm::Type *PtrTy = llvm::IntegerType::get(
6117 getLLVMContext(), getContext().getTypeSize(Ty))->getPointerTo();
6118 LoadAddr = Builder.CreateBitCast(LoadAddr, PtrTy);
Tim Northover6aacd492013-07-16 09:47:53 +00006119
Tim Northover3acd6bd2014-07-02 12:56:02 +00006120 Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI__builtin_arm_ldaex
6121 ? Intrinsic::arm_ldaex
6122 : Intrinsic::arm_ldrex,
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006123 PtrTy);
Tim Northover6aacd492013-07-16 09:47:53 +00006124 Value *Val = Builder.CreateCall(F, LoadAddr, "ldrex");
6125
6126 if (RealResTy->isPointerTy())
6127 return Builder.CreateIntToPtr(Val, RealResTy);
6128 else {
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006129 llvm::Type *IntResTy = llvm::IntegerType::get(
6130 getLLVMContext(), CGM.getDataLayout().getTypeSizeInBits(RealResTy));
Tim Northover6aacd492013-07-16 09:47:53 +00006131 Val = Builder.CreateTruncOrBitCast(Val, IntResTy);
6132 return Builder.CreateBitCast(Val, RealResTy);
6133 }
6134 }
6135
6136 if (BuiltinID == ARM::BI__builtin_arm_strexd ||
Tim Northover3acd6bd2014-07-02 12:56:02 +00006137 ((BuiltinID == ARM::BI__builtin_arm_stlex ||
6138 BuiltinID == ARM::BI__builtin_arm_strex) &&
Tim Northover6aacd492013-07-16 09:47:53 +00006139 getContext().getTypeSize(E->getArg(0)->getType()) == 64)) {
Tim Northover3acd6bd2014-07-02 12:56:02 +00006140 Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI__builtin_arm_stlex
6141 ? Intrinsic::arm_stlexd
6142 : Intrinsic::arm_strexd);
Serge Guelton1d993272017-05-09 19:31:30 +00006143 llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty);
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006144
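// Spill the 64-bit store value to a temporary, reload it as a pair of i32
// halves, and pass both halves plus the address to the strexd/stlexd
// intrinsic.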
John McCall7f416cc2015-09-08 08:05:57 +00006145 Address Tmp = CreateMemTemp(E->getArg(0)->getType());
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006146 Value *Val = EmitScalarExpr(E->getArg(0));
6147 Builder.CreateStore(Val, Tmp);
6148
John McCall7f416cc2015-09-08 08:05:57 +00006149 Address LdPtr = Builder.CreateBitCast(Tmp,llvm::PointerType::getUnqual(STy));
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006150 Val = Builder.CreateLoad(LdPtr);
6151
6152 Value *Arg0 = Builder.CreateExtractValue(Val, 0);
6153 Value *Arg1 = Builder.CreateExtractValue(Val, 1);
Tim Northover6aacd492013-07-16 09:47:53 +00006154 Value *StPtr = Builder.CreateBitCast(EmitScalarExpr(E->getArg(1)), Int8PtrTy);
David Blaikie43f9bb72015-05-18 22:14:03 +00006155 return Builder.CreateCall(F, {Arg0, Arg1, StPtr}, "strexd");
Bruno Cardoso Lopesfe733742011-05-28 04:11:33 +00006156 }
6157
Tim Northover3acd6bd2014-07-02 12:56:02 +00006158 if (BuiltinID == ARM::BI__builtin_arm_strex ||
6159 BuiltinID == ARM::BI__builtin_arm_stlex) {
Tim Northover6aacd492013-07-16 09:47:53 +00006160 Value *StoreVal = EmitScalarExpr(E->getArg(0));
6161 Value *StoreAddr = EmitScalarExpr(E->getArg(1));
6162
6163 QualType Ty = E->getArg(0)->getType();
6164 llvm::Type *StoreTy = llvm::IntegerType::get(getLLVMContext(),
6165 getContext().getTypeSize(Ty));
6166 StoreAddr = Builder.CreateBitCast(StoreAddr, StoreTy->getPointerTo());
6167
6168 if (StoreVal->getType()->isPointerTy())
6169 StoreVal = Builder.CreatePtrToInt(StoreVal, Int32Ty);
6170 else {
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006171 llvm::Type *IntTy = llvm::IntegerType::get(
6172 getLLVMContext(),
6173 CGM.getDataLayout().getTypeSizeInBits(StoreVal->getType()));
6174 StoreVal = Builder.CreateBitCast(StoreVal, IntTy);
Tim Northover6aacd492013-07-16 09:47:53 +00006175 StoreVal = Builder.CreateZExtOrBitCast(StoreVal, Int32Ty);
6176 }
6177
Tim Northover3acd6bd2014-07-02 12:56:02 +00006178 Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI__builtin_arm_stlex
6179 ? Intrinsic::arm_stlex
6180 : Intrinsic::arm_strex,
6181 StoreAddr->getType());
David Blaikie43f9bb72015-05-18 22:14:03 +00006182 return Builder.CreateCall(F, {StoreVal, StoreAddr}, "strex");
Tim Northover6aacd492013-07-16 09:47:53 +00006183 }
6184
6185 if (BuiltinID == ARM::BI__builtin_arm_clrex) {
6186 Function *F = CGM.getIntrinsic(Intrinsic::arm_clrex);
David Blaikie4ba525b2015-07-14 17:27:39 +00006187 return Builder.CreateCall(F);
Tim Northover6aacd492013-07-16 09:47:53 +00006188 }
6189
Joey Gouly1e8637b2013-09-18 10:07:09 +00006190 // CRC32
6191 Intrinsic::ID CRCIntrinsicID = Intrinsic::not_intrinsic;
6192 switch (BuiltinID) {
6193 case ARM::BI__builtin_arm_crc32b:
6194 CRCIntrinsicID = Intrinsic::arm_crc32b; break;
6195 case ARM::BI__builtin_arm_crc32cb:
6196 CRCIntrinsicID = Intrinsic::arm_crc32cb; break;
6197 case ARM::BI__builtin_arm_crc32h:
6198 CRCIntrinsicID = Intrinsic::arm_crc32h; break;
6199 case ARM::BI__builtin_arm_crc32ch:
6200 CRCIntrinsicID = Intrinsic::arm_crc32ch; break;
6201 case ARM::BI__builtin_arm_crc32w:
6202 case ARM::BI__builtin_arm_crc32d:
6203 CRCIntrinsicID = Intrinsic::arm_crc32w; break;
6204 case ARM::BI__builtin_arm_crc32cw:
6205 case ARM::BI__builtin_arm_crc32cd:
6206 CRCIntrinsicID = Intrinsic::arm_crc32cw; break;
6207 }
6208
6209 if (CRCIntrinsicID != Intrinsic::not_intrinsic) {
6210 Value *Arg0 = EmitScalarExpr(E->getArg(0));
6211 Value *Arg1 = EmitScalarExpr(E->getArg(1));
6212
6213 // crc32{c,}d intrinsics are implemented as two calls to crc32{c,}w
6214 // intrinsics, hence we need different codegen for these cases.
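// For example, __builtin_arm_crc32d(a, b) is emitted as
//   crc32w(crc32w(a, lo32(b)), hi32(b))
// where lo32/hi32 denote the low and high 32-bit halves of the 64-bit operand.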
6215 if (BuiltinID == ARM::BI__builtin_arm_crc32d ||
6216 BuiltinID == ARM::BI__builtin_arm_crc32cd) {
6217 Value *C1 = llvm::ConstantInt::get(Int64Ty, 32);
6218 Value *Arg1a = Builder.CreateTruncOrBitCast(Arg1, Int32Ty);
6219 Value *Arg1b = Builder.CreateLShr(Arg1, C1);
6220 Arg1b = Builder.CreateTruncOrBitCast(Arg1b, Int32Ty);
6221
6222 Function *F = CGM.getIntrinsic(CRCIntrinsicID);
David Blaikie43f9bb72015-05-18 22:14:03 +00006223 Value *Res = Builder.CreateCall(F, {Arg0, Arg1a});
6224 return Builder.CreateCall(F, {Res, Arg1b});
Joey Gouly1e8637b2013-09-18 10:07:09 +00006225 } else {
6226 Arg1 = Builder.CreateZExtOrBitCast(Arg1, Int32Ty);
6227
6228 Function *F = CGM.getIntrinsic(CRCIntrinsicID);
David Blaikie43f9bb72015-05-18 22:14:03 +00006229 return Builder.CreateCall(F, {Arg0, Arg1});
Joey Gouly1e8637b2013-09-18 10:07:09 +00006230 }
6231 }
6232
Luke Cheeseman59b2d832015-06-15 17:51:01 +00006233 if (BuiltinID == ARM::BI__builtin_arm_rsr ||
6234 BuiltinID == ARM::BI__builtin_arm_rsr64 ||
6235 BuiltinID == ARM::BI__builtin_arm_rsrp ||
6236 BuiltinID == ARM::BI__builtin_arm_wsr ||
6237 BuiltinID == ARM::BI__builtin_arm_wsr64 ||
6238 BuiltinID == ARM::BI__builtin_arm_wsrp) {
6239
6240 bool IsRead = BuiltinID == ARM::BI__builtin_arm_rsr ||
6241 BuiltinID == ARM::BI__builtin_arm_rsr64 ||
6242 BuiltinID == ARM::BI__builtin_arm_rsrp;
6243
6244 bool IsPointerBuiltin = BuiltinID == ARM::BI__builtin_arm_rsrp ||
6245 BuiltinID == ARM::BI__builtin_arm_wsrp;
6246
6247 bool Is64Bit = BuiltinID == ARM::BI__builtin_arm_rsr64 ||
6248 BuiltinID == ARM::BI__builtin_arm_wsr64;
6249
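// The pointer variants (rsrp/wsrp) move a void* value through a 32-bit
// register; the 64-bit variants use a 64-bit register, and all other forms
// use a 32-bit register.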
6250 llvm::Type *ValueType;
6251 llvm::Type *RegisterType;
6252 if (IsPointerBuiltin) {
6253 ValueType = VoidPtrTy;
6254 RegisterType = Int32Ty;
6255 } else if (Is64Bit) {
6256 ValueType = RegisterType = Int64Ty;
6257 } else {
6258 ValueType = RegisterType = Int32Ty;
6259 }
6260
6261 return EmitSpecialRegisterBuiltin(*this, E, RegisterType, ValueType, IsRead);
6262 }
6263
Ahmed Bougacha94df7302015-06-04 01:43:41 +00006264 // Find out if any arguments are required to be integer constant
6265 // expressions.
6266 unsigned ICEArguments = 0;
6267 ASTContext::GetBuiltinTypeError Error;
6268 getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
6269 assert(Error == ASTContext::GE_None && "Should not codegen an error");
6270
John McCall7f416cc2015-09-08 08:05:57 +00006271 auto getAlignmentValue32 = [&](Address addr) -> Value* {
6272 return Builder.getInt32(addr.getAlignment().getQuantity());
6273 };
6274
6275 Address PtrOp0 = Address::invalid();
6276 Address PtrOp1 = Address::invalid();
Chris Lattner0e62c1c2011-07-23 10:55:15 +00006277 SmallVector<Value*, 4> Ops;
Bob Wilson63c93142015-06-24 06:05:20 +00006278 bool HasExtraArg = HasExtraNeonArgument(BuiltinID);
6279 unsigned NumArgs = E->getNumArgs() - (HasExtraArg ? 1 : 0);
6280 for (unsigned i = 0, e = NumArgs; i != e; i++) {
Eli Friedmana5dd5682012-08-23 03:10:17 +00006281 if (i == 0) {
6282 switch (BuiltinID) {
Tim Northoverc322f832014-01-30 14:47:51 +00006283 case NEON::BI__builtin_neon_vld1_v:
6284 case NEON::BI__builtin_neon_vld1q_v:
6285 case NEON::BI__builtin_neon_vld1q_lane_v:
6286 case NEON::BI__builtin_neon_vld1_lane_v:
6287 case NEON::BI__builtin_neon_vld1_dup_v:
6288 case NEON::BI__builtin_neon_vld1q_dup_v:
6289 case NEON::BI__builtin_neon_vst1_v:
6290 case NEON::BI__builtin_neon_vst1q_v:
6291 case NEON::BI__builtin_neon_vst1q_lane_v:
6292 case NEON::BI__builtin_neon_vst1_lane_v:
6293 case NEON::BI__builtin_neon_vst2_v:
6294 case NEON::BI__builtin_neon_vst2q_v:
6295 case NEON::BI__builtin_neon_vst2_lane_v:
6296 case NEON::BI__builtin_neon_vst2q_lane_v:
6297 case NEON::BI__builtin_neon_vst3_v:
6298 case NEON::BI__builtin_neon_vst3q_v:
6299 case NEON::BI__builtin_neon_vst3_lane_v:
6300 case NEON::BI__builtin_neon_vst3q_lane_v:
6301 case NEON::BI__builtin_neon_vst4_v:
6302 case NEON::BI__builtin_neon_vst4q_v:
6303 case NEON::BI__builtin_neon_vst4_lane_v:
6304 case NEON::BI__builtin_neon_vst4q_lane_v:
Eli Friedmana5dd5682012-08-23 03:10:17 +00006305 // Get the alignment for the argument in addition to the value;
6306 // we'll use it later.
John McCall7f416cc2015-09-08 08:05:57 +00006307 PtrOp0 = EmitPointerWithAlignment(E->getArg(0));
6308 Ops.push_back(PtrOp0.getPointer());
Eli Friedmana5dd5682012-08-23 03:10:17 +00006309 continue;
6310 }
6311 }
6312 if (i == 1) {
6313 switch (BuiltinID) {
Tim Northoverc322f832014-01-30 14:47:51 +00006314 case NEON::BI__builtin_neon_vld2_v:
6315 case NEON::BI__builtin_neon_vld2q_v:
6316 case NEON::BI__builtin_neon_vld3_v:
6317 case NEON::BI__builtin_neon_vld3q_v:
6318 case NEON::BI__builtin_neon_vld4_v:
6319 case NEON::BI__builtin_neon_vld4q_v:
6320 case NEON::BI__builtin_neon_vld2_lane_v:
6321 case NEON::BI__builtin_neon_vld2q_lane_v:
6322 case NEON::BI__builtin_neon_vld3_lane_v:
6323 case NEON::BI__builtin_neon_vld3q_lane_v:
6324 case NEON::BI__builtin_neon_vld4_lane_v:
6325 case NEON::BI__builtin_neon_vld4q_lane_v:
6326 case NEON::BI__builtin_neon_vld2_dup_v:
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00006327 case NEON::BI__builtin_neon_vld2q_dup_v:
Tim Northoverc322f832014-01-30 14:47:51 +00006328 case NEON::BI__builtin_neon_vld3_dup_v:
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00006329 case NEON::BI__builtin_neon_vld3q_dup_v:
Tim Northoverc322f832014-01-30 14:47:51 +00006330 case NEON::BI__builtin_neon_vld4_dup_v:
Ivan A. Kosareva9f484a2018-06-27 13:58:43 +00006331 case NEON::BI__builtin_neon_vld4q_dup_v:
Eli Friedmana5dd5682012-08-23 03:10:17 +00006332 // Get the alignment for the argument in addition to the value;
6333 // we'll use it later.
John McCall7f416cc2015-09-08 08:05:57 +00006334 PtrOp1 = EmitPointerWithAlignment(E->getArg(1));
6335 Ops.push_back(PtrOp1.getPointer());
Eli Friedmana5dd5682012-08-23 03:10:17 +00006336 continue;
6337 }
6338 }
Ahmed Bougacha94df7302015-06-04 01:43:41 +00006339
6340 if ((ICEArguments & (1 << i)) == 0) {
6341 Ops.push_back(EmitScalarExpr(E->getArg(i)));
6342 } else {
6343 // If this is required to be a constant, constant fold it so that we know
6344 // that the generated intrinsic gets a ConstantInt.
6345 llvm::APSInt Result;
6346 bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
6347 assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
6348 Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
6349 }
Eli Friedmana5dd5682012-08-23 03:10:17 +00006350 }
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006351
Bob Wilson445c24f2011-08-13 05:03:46 +00006352 switch (BuiltinID) {
6353 default: break;
Bob Wilson63c93142015-06-24 06:05:20 +00006354
Tim Northoverc322f832014-01-30 14:47:51 +00006355 case NEON::BI__builtin_neon_vget_lane_i8:
6356 case NEON::BI__builtin_neon_vget_lane_i16:
6357 case NEON::BI__builtin_neon_vget_lane_i32:
6358 case NEON::BI__builtin_neon_vget_lane_i64:
6359 case NEON::BI__builtin_neon_vget_lane_f32:
6360 case NEON::BI__builtin_neon_vgetq_lane_i8:
6361 case NEON::BI__builtin_neon_vgetq_lane_i16:
6362 case NEON::BI__builtin_neon_vgetq_lane_i32:
6363 case NEON::BI__builtin_neon_vgetq_lane_i64:
6364 case NEON::BI__builtin_neon_vgetq_lane_f32:
Bob Wilson63c93142015-06-24 06:05:20 +00006365 return Builder.CreateExtractElement(Ops[0], Ops[1], "vget_lane");
6366
Ivan A. Kosarev9cdb2c72018-04-13 12:46:02 +00006367 case NEON::BI__builtin_neon_vrndns_f32: {
6368 Value *Arg = EmitScalarExpr(E->getArg(0));
6369 llvm::Type *Tys[] = {Arg->getType()};
6370 Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vrintn, Tys);
6371 return Builder.CreateCall(F, {Arg}, "vrndn"); }
6372
Tim Northoverc322f832014-01-30 14:47:51 +00006373 case NEON::BI__builtin_neon_vset_lane_i8:
6374 case NEON::BI__builtin_neon_vset_lane_i16:
6375 case NEON::BI__builtin_neon_vset_lane_i32:
6376 case NEON::BI__builtin_neon_vset_lane_i64:
6377 case NEON::BI__builtin_neon_vset_lane_f32:
6378 case NEON::BI__builtin_neon_vsetq_lane_i8:
6379 case NEON::BI__builtin_neon_vsetq_lane_i16:
6380 case NEON::BI__builtin_neon_vsetq_lane_i32:
6381 case NEON::BI__builtin_neon_vsetq_lane_i64:
6382 case NEON::BI__builtin_neon_vsetq_lane_f32:
Bob Wilson445c24f2011-08-13 05:03:46 +00006383 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
Tim Northover02e38602014-02-03 17:28:04 +00006384
Tim Northover02e38602014-02-03 17:28:04 +00006385 case NEON::BI__builtin_neon_vsha1h_u32:
Tim Northover02e38602014-02-03 17:28:04 +00006386 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1h), Ops,
6387 "vsha1h");
6388 case NEON::BI__builtin_neon_vsha1cq_u32:
Tim Northover02e38602014-02-03 17:28:04 +00006389 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1c), Ops,
6390 "vsha1c");
6391 case NEON::BI__builtin_neon_vsha1pq_u32:
Tim Northover02e38602014-02-03 17:28:04 +00006392 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1p), Ops,
6393 "vsha1p");
6394 case NEON::BI__builtin_neon_vsha1mq_u32:
Tim Northover02e38602014-02-03 17:28:04 +00006395 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1m), Ops,
6396 "vsha1m");
Bob Wilson63c93142015-06-24 06:05:20 +00006397
6398 // The ARM _MoveToCoprocessor builtins put the input register value as
Simon Pilgrim532de1c2016-06-13 10:05:19 +00006399 // the first argument, but the LLVM intrinsic expects it as the third one.
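// The call below therefore passes operands 1 and 2 first, then operand 0
// (the register value), followed by operands 3 through 5 in their original
// order.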
6400 case ARM::BI_MoveToCoprocessor:
6401 case ARM::BI_MoveToCoprocessor2: {
6402 Function *F = CGM.getIntrinsic(BuiltinID == ARM::BI_MoveToCoprocessor ?
6403 Intrinsic::arm_mcr : Intrinsic::arm_mcr2);
6404 return Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0],
6405 Ops[3], Ops[4], Ops[5]});
Bob Wilson63c93142015-06-24 06:05:20 +00006406 }
Albert Gutowski2a0621e2016-10-12 22:01:05 +00006407 case ARM::BI_BitScanForward:
6408 case ARM::BI_BitScanForward64:
6409 return EmitMSVCBuiltinExpr(MSVCIntrin::_BitScanForward, E);
6410 case ARM::BI_BitScanReverse:
6411 case ARM::BI_BitScanReverse64:
6412 return EmitMSVCBuiltinExpr(MSVCIntrin::_BitScanReverse, E);
Albert Gutowski5e08df02016-10-13 22:35:07 +00006413
6414 case ARM::BI_InterlockedAnd64:
6415 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd, E);
6416 case ARM::BI_InterlockedExchange64:
6417 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange, E);
6418 case ARM::BI_InterlockedExchangeAdd64:
6419 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd, E);
6420 case ARM::BI_InterlockedExchangeSub64:
6421 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeSub, E);
6422 case ARM::BI_InterlockedOr64:
6423 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr, E);
6424 case ARM::BI_InterlockedXor64:
6425 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor, E);
6426 case ARM::BI_InterlockedDecrement64:
6427 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement, E);
6428 case ARM::BI_InterlockedIncrement64:
6429 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement, E);
Eli Friedmanb262d162018-10-31 21:31:09 +00006430 case ARM::BI_InterlockedExchangeAdd8_acq:
6431 case ARM::BI_InterlockedExchangeAdd16_acq:
6432 case ARM::BI_InterlockedExchangeAdd_acq:
6433 case ARM::BI_InterlockedExchangeAdd64_acq:
6434 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd_acq, E);
6435 case ARM::BI_InterlockedExchangeAdd8_rel:
6436 case ARM::BI_InterlockedExchangeAdd16_rel:
6437 case ARM::BI_InterlockedExchangeAdd_rel:
6438 case ARM::BI_InterlockedExchangeAdd64_rel:
6439 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd_rel, E);
6440 case ARM::BI_InterlockedExchangeAdd8_nf:
6441 case ARM::BI_InterlockedExchangeAdd16_nf:
6442 case ARM::BI_InterlockedExchangeAdd_nf:
6443 case ARM::BI_InterlockedExchangeAdd64_nf:
6444 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd_nf, E);
Mandeep Singh Grang7fa07e52018-11-02 21:18:23 +00006445 case ARM::BI_InterlockedExchange8_acq:
6446 case ARM::BI_InterlockedExchange16_acq:
6447 case ARM::BI_InterlockedExchange_acq:
6448 case ARM::BI_InterlockedExchange64_acq:
6449 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange_acq, E);
6450 case ARM::BI_InterlockedExchange8_rel:
6451 case ARM::BI_InterlockedExchange16_rel:
6452 case ARM::BI_InterlockedExchange_rel:
6453 case ARM::BI_InterlockedExchange64_rel:
6454 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange_rel, E);
6455 case ARM::BI_InterlockedExchange8_nf:
6456 case ARM::BI_InterlockedExchange16_nf:
6457 case ARM::BI_InterlockedExchange_nf:
6458 case ARM::BI_InterlockedExchange64_nf:
6459 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange_nf, E);
Mandeep Singh Grang6b880682018-11-06 00:36:48 +00006460 case ARM::BI_InterlockedCompareExchange8_acq:
6461 case ARM::BI_InterlockedCompareExchange16_acq:
6462 case ARM::BI_InterlockedCompareExchange_acq:
6463 case ARM::BI_InterlockedCompareExchange64_acq:
6464 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedCompareExchange_acq, E);
6465 case ARM::BI_InterlockedCompareExchange8_rel:
6466 case ARM::BI_InterlockedCompareExchange16_rel:
6467 case ARM::BI_InterlockedCompareExchange_rel:
6468 case ARM::BI_InterlockedCompareExchange64_rel:
6469 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedCompareExchange_rel, E);
6470 case ARM::BI_InterlockedCompareExchange8_nf:
6471 case ARM::BI_InterlockedCompareExchange16_nf:
6472 case ARM::BI_InterlockedCompareExchange_nf:
6473 case ARM::BI_InterlockedCompareExchange64_nf:
6474 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedCompareExchange_nf, E);
Mandeep Singh Grangec62b312018-11-06 01:11:25 +00006475 case ARM::BI_InterlockedOr8_acq:
6476 case ARM::BI_InterlockedOr16_acq:
6477 case ARM::BI_InterlockedOr_acq:
6478 case ARM::BI_InterlockedOr64_acq:
6479 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr_acq, E);
6480 case ARM::BI_InterlockedOr8_rel:
6481 case ARM::BI_InterlockedOr16_rel:
6482 case ARM::BI_InterlockedOr_rel:
6483 case ARM::BI_InterlockedOr64_rel:
6484 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr_rel, E);
6485 case ARM::BI_InterlockedOr8_nf:
6486 case ARM::BI_InterlockedOr16_nf:
6487 case ARM::BI_InterlockedOr_nf:
6488 case ARM::BI_InterlockedOr64_nf:
6489 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr_nf, E);
Mandeep Singh Grang806f1072018-11-06 04:55:20 +00006490 case ARM::BI_InterlockedXor8_acq:
6491 case ARM::BI_InterlockedXor16_acq:
6492 case ARM::BI_InterlockedXor_acq:
6493 case ARM::BI_InterlockedXor64_acq:
6494 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor_acq, E);
6495 case ARM::BI_InterlockedXor8_rel:
6496 case ARM::BI_InterlockedXor16_rel:
6497 case ARM::BI_InterlockedXor_rel:
6498 case ARM::BI_InterlockedXor64_rel:
6499 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor_rel, E);
6500 case ARM::BI_InterlockedXor8_nf:
6501 case ARM::BI_InterlockedXor16_nf:
6502 case ARM::BI_InterlockedXor_nf:
6503 case ARM::BI_InterlockedXor64_nf:
6504 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor_nf, E);
Mandeep Singh Grangc89157b2018-11-06 05:03:13 +00006505 case ARM::BI_InterlockedAnd8_acq:
6506 case ARM::BI_InterlockedAnd16_acq:
6507 case ARM::BI_InterlockedAnd_acq:
6508 case ARM::BI_InterlockedAnd64_acq:
6509 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd_acq, E);
6510 case ARM::BI_InterlockedAnd8_rel:
6511 case ARM::BI_InterlockedAnd16_rel:
6512 case ARM::BI_InterlockedAnd_rel:
6513 case ARM::BI_InterlockedAnd64_rel:
6514 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd_rel, E);
6515 case ARM::BI_InterlockedAnd8_nf:
6516 case ARM::BI_InterlockedAnd16_nf:
6517 case ARM::BI_InterlockedAnd_nf:
6518 case ARM::BI_InterlockedAnd64_nf:
6519 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd_nf, E);
Mandeep Singh Grangfdf74d92018-11-06 05:05:32 +00006520 case ARM::BI_InterlockedIncrement16_acq:
6521 case ARM::BI_InterlockedIncrement_acq:
6522 case ARM::BI_InterlockedIncrement64_acq:
6523 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement_acq, E);
6524 case ARM::BI_InterlockedIncrement16_rel:
6525 case ARM::BI_InterlockedIncrement_rel:
6526 case ARM::BI_InterlockedIncrement64_rel:
6527 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement_rel, E);
6528 case ARM::BI_InterlockedIncrement16_nf:
6529 case ARM::BI_InterlockedIncrement_nf:
6530 case ARM::BI_InterlockedIncrement64_nf:
6531 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement_nf, E);
Mandeep Singh Grang574cadd2018-11-06 05:07:43 +00006532 case ARM::BI_InterlockedDecrement16_acq:
6533 case ARM::BI_InterlockedDecrement_acq:
6534 case ARM::BI_InterlockedDecrement64_acq:
6535 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement_acq, E);
6536 case ARM::BI_InterlockedDecrement16_rel:
6537 case ARM::BI_InterlockedDecrement_rel:
6538 case ARM::BI_InterlockedDecrement64_rel:
6539 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement_rel, E);
6540 case ARM::BI_InterlockedDecrement16_nf:
6541 case ARM::BI_InterlockedDecrement_nf:
6542 case ARM::BI_InterlockedDecrement64_nf:
6543 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement_nf, E);
Bob Wilson445c24f2011-08-13 05:03:46 +00006544 }
6545
6546 // Get the last argument, which specifies the vector type.
Bob Wilson63c93142015-06-24 06:05:20 +00006547 assert(HasExtraArg);
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006548 llvm::APSInt Result;
6549 const Expr *Arg = E->getArg(E->getNumArgs()-1);
6550 if (!Arg->isIntegerConstantExpr(Result, getContext()))
Craig Topper8a13c412014-05-21 05:09:00 +00006551 return nullptr;
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006552
Nate Begemanf568b072010-08-03 21:32:34 +00006553 if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f ||
6554 BuiltinID == ARM::BI__builtin_arm_vcvtr_d) {
6555 // Determine the overloaded type of this builtin.
Chris Lattnera5f58b02011-07-09 17:41:47 +00006556 llvm::Type *Ty;
Nate Begemanf568b072010-08-03 21:32:34 +00006557 if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f)
Chris Lattnerece04092012-02-07 00:39:47 +00006558 Ty = FloatTy;
Nate Begemanf568b072010-08-03 21:32:34 +00006559 else
Chris Lattnerece04092012-02-07 00:39:47 +00006560 Ty = DoubleTy;
Jim Grosbachd3608f42012-09-21 00:18:27 +00006561
Nate Begemanf568b072010-08-03 21:32:34 +00006562 // Determine whether this is an unsigned conversion or not.
6563 bool usgn = Result.getZExtValue() == 1;
6564 unsigned Int = usgn ? Intrinsic::arm_vcvtru : Intrinsic::arm_vcvtr;
6565
6566 // Call the appropriate intrinsic.
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006567 Function *F = CGM.getIntrinsic(Int, Ty);
Jay Foad5bd375a2011-07-15 08:37:34 +00006568 return Builder.CreateCall(F, Ops, "vcvtr");
Nate Begemanf568b072010-08-03 21:32:34 +00006569 }
Jim Grosbachd3608f42012-09-21 00:18:27 +00006570
Nate Begemanf568b072010-08-03 21:32:34 +00006571 // Determine the type of this overloaded NEON intrinsic.
Bob Wilson98bc98c2011-11-08 01:16:11 +00006572 NeonTypeFlags Type(Result.getZExtValue());
6573 bool usgn = Type.isUnsigned();
Bob Wilson4fa993f2010-12-03 17:10:22 +00006574 bool rightShift = false;
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006575
Sjoerd Meijer87793e72018-03-19 13:22:49 +00006576 llvm::VectorType *VTy = GetNeonType(this, Type,
6577 getTarget().hasLegalHalfType());
Chris Lattnera5f58b02011-07-09 17:41:47 +00006578 llvm::Type *Ty = VTy;
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006579 if (!Ty)
Craig Topper8a13c412014-05-21 05:09:00 +00006580 return nullptr;
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006581
Tim Northoverac85c342014-01-30 14:47:57 +00006582 // Many NEON builtins have identical semantics and uses in ARM and
6583 // AArch64. Emit these in a single function.
Craig Topper5fc8fc22014-08-27 06:28:36 +00006584 auto IntrinsicMap = makeArrayRef(ARMSIMDIntrinsicMap);
Tim Northover8fe03d62014-02-21 11:57:24 +00006585 const NeonIntrinsicInfo *Builtin = findNeonIntrinsicInMap(
6586 IntrinsicMap, BuiltinID, NEONSIMDIntrinsicsProvenSorted);
6587 if (Builtin)
6588 return EmitCommonNeonBuiltinExpr(
6589 Builtin->BuiltinID, Builtin->LLVMIntrinsic, Builtin->AltLLVMIntrinsic,
Sjoerd Meijer95da8752018-03-13 19:38:56 +00006590 Builtin->NameHint, Builtin->TypeModifier, E, Ops, PtrOp0, PtrOp1, Arch);
Tim Northoverac85c342014-01-30 14:47:57 +00006591
Rafael Espindola6bb986d2010-06-09 03:48:40 +00006592 unsigned Int;
6593 switch (BuiltinID) {
Craig Topper8a13c412014-05-21 05:09:00 +00006594 default: return nullptr;
Tim Northoverc322f832014-01-30 14:47:51 +00006595 case NEON::BI__builtin_neon_vld1q_lane_v:
Bob Wilson2605fef2012-08-14 17:27:04 +00006596 // Handle 64-bit integer elements as a special case. Use shuffles of
6597 // one-element vectors to avoid poor code for i64 in the backend.
6598 if (VTy->getElementType()->isIntegerTy(64)) {
6599 // Extract the other lane.
6600 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
Benjamin Kramerc385a802015-07-28 15:40:11 +00006601 uint32_t Lane = cast<ConstantInt>(Ops[2])->getZExtValue();
Bob Wilson2605fef2012-08-14 17:27:04 +00006602 Value *SV = llvm::ConstantVector::get(ConstantInt::get(Int32Ty, 1-Lane));
6603 Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV);
6604 // Load the value as a one-element vector.
6605 Ty = llvm::VectorType::get(VTy->getElementType(), 1);
Jeroen Ketema55a8e802015-09-30 10:56:56 +00006606 llvm::Type *Tys[] = {Ty, Int8PtrTy};
6607 Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld1, Tys);
John McCall7f416cc2015-09-08 08:05:57 +00006608 Value *Align = getAlignmentValue32(PtrOp0);
David Blaikie43f9bb72015-05-18 22:14:03 +00006609 Value *Ld = Builder.CreateCall(F, {Ops[0], Align});
Bob Wilson2605fef2012-08-14 17:27:04 +00006610 // Combine them.
Benjamin Kramerc385a802015-07-28 15:40:11 +00006611 uint32_t Indices[] = {1 - Lane, Lane};
6612 SV = llvm::ConstantDataVector::get(getLLVMContext(), Indices);
Bob Wilson2605fef2012-08-14 17:27:04 +00006613 return Builder.CreateShuffleVector(Ops[1], Ld, SV, "vld1q_lane");
6614 }
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00006615 LLVM_FALLTHROUGH;
Tim Northoverc322f832014-01-30 14:47:51 +00006616 case NEON::BI__builtin_neon_vld1_lane_v: {
Nate Begemaned48c852010-06-20 23:05:28 +00006617 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
Steven Wu0d22f2d2015-09-09 01:37:18 +00006618 PtrOp0 = Builder.CreateElementBitCast(PtrOp0, VTy->getElementType());
John McCall7f416cc2015-09-08 08:05:57 +00006619 Value *Ld = Builder.CreateLoad(PtrOp0);
Bob Wilson49708d42012-02-04 23:58:08 +00006620 return Builder.CreateInsertElement(Ops[1], Ld, Ops[2], "vld1_lane");
6621 }
Tim Northoverc322f832014-01-30 14:47:51 +00006622 case NEON::BI__builtin_neon_vqrshrn_n_v:
Jim Grosbachd3608f42012-09-21 00:18:27 +00006623 Int =
6624 usgn ? Intrinsic::arm_neon_vqrshiftnu : Intrinsic::arm_neon_vqrshiftns;
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006625 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n",
Nate Begeman91e1fea2010-06-14 05:21:25 +00006626 1, true);
Tim Northoverc322f832014-01-30 14:47:51 +00006627 case NEON::BI__builtin_neon_vqrshrun_n_v:
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006628 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrshiftnsu, Ty),
Bob Wilson482afae2010-12-08 22:37:56 +00006629 Ops, "vqrshrun_n", 1, true);
Tim Northoverc322f832014-01-30 14:47:51 +00006630 case NEON::BI__builtin_neon_vqshrn_n_v:
Nate Begeman91e1fea2010-06-14 05:21:25 +00006631 Int = usgn ? Intrinsic::arm_neon_vqshiftnu : Intrinsic::arm_neon_vqshiftns;
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006632 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n",
Nate Begeman91e1fea2010-06-14 05:21:25 +00006633 1, true);
Tim Northoverc322f832014-01-30 14:47:51 +00006634 case NEON::BI__builtin_neon_vqshrun_n_v:
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006635 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftnsu, Ty),
Bob Wilson482afae2010-12-08 22:37:56 +00006636 Ops, "vqshrun_n", 1, true);
Tim Northoverc322f832014-01-30 14:47:51 +00006637 case NEON::BI__builtin_neon_vrecpe_v:
6638 case NEON::BI__builtin_neon_vrecpeq_v:
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006639 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecpe, Ty),
Nate Begeman8ed060b2010-06-11 22:57:12 +00006640 Ops, "vrecpe");
Tim Northoverc322f832014-01-30 14:47:51 +00006641 case NEON::BI__builtin_neon_vrshrn_n_v:
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006642 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrshiftn, Ty),
Bob Wilson482afae2010-12-08 22:37:56 +00006643 Ops, "vrshrn_n", 1, true);
Tim Northoverc322f832014-01-30 14:47:51 +00006644 case NEON::BI__builtin_neon_vrsra_n_v:
6645 case NEON::BI__builtin_neon_vrsraq_n_v:
Nate Begemanc6ac0ce2010-06-12 06:06:07 +00006646 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
6647 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
6648 Ops[2] = EmitNeonShiftVector(Ops[2], Ty, true);
6649 Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
David Blaikie43f9bb72015-05-18 22:14:03 +00006650 Ops[1] = Builder.CreateCall(CGM.getIntrinsic(Int, Ty), {Ops[1], Ops[2]});
Nate Begemanc6ac0ce2010-06-12 06:06:07 +00006651 return Builder.CreateAdd(Ops[0], Ops[1], "vrsra_n");
Tim Northoverc322f832014-01-30 14:47:51 +00006652 case NEON::BI__builtin_neon_vsri_n_v:
6653 case NEON::BI__builtin_neon_vsriq_n_v:
Bob Wilson4fa993f2010-12-03 17:10:22 +00006654 rightShift = true;
Galina Kistanova0872d6c2017-06-03 06:30:46 +00006655 LLVM_FALLTHROUGH;
Tim Northoverc322f832014-01-30 14:47:51 +00006656 case NEON::BI__builtin_neon_vsli_n_v:
6657 case NEON::BI__builtin_neon_vsliq_n_v:
Bob Wilson4fa993f2010-12-03 17:10:22 +00006658 Ops[2] = EmitNeonShiftVector(Ops[2], Ty, rightShift);
Benjamin Kramer8d375ce2011-07-14 17:45:50 +00006659 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftins, Ty),
Nate Begeman8ed060b2010-06-11 22:57:12 +00006660 Ops, "vsli_n");
Tim Northoverc322f832014-01-30 14:47:51 +00006661 case NEON::BI__builtin_neon_vsra_n_v:
6662 case NEON::BI__builtin_neon_vsraq_n_v:
Nate Begeman8ed060b2010-06-11 22:57:12 +00006663 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
Amaury de la Vieuville21bf6ed2013-10-04 13:13:15 +00006664 Ops[1] = EmitNeonRShiftImm(Ops[1], Ops[2], Ty, usgn, "vsra_n");
Nate Begeman8ed060b2010-06-11 22:57:12 +00006665 return Builder.CreateAdd(Ops[0], Ops[1]);
Tim Northoverc322f832014-01-30 14:47:51 +00006666 case NEON::BI__builtin_neon_vst1q_lane_v:
Bob Wilson2605fef2012-08-14 17:27:04 +00006667 // Handle 64-bit integer elements as a special case. Use a shuffle to get
6668 // a one-element vector and avoid poor code for i64 in the backend.
6669 if (VTy->getElementType()->isIntegerTy(64)) {
6670 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
6671 Value *SV = llvm::ConstantVector::get(cast<llvm::Constant>(Ops[2]));
6672 Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV);
John McCall7f416cc2015-09-08 08:05:57 +00006673 Ops[2] = getAlignmentValue32(PtrOp0);
Jeroen Ketema55a8e802015-09-30 10:56:56 +00006674 llvm::Type *Tys[] = {Int8PtrTy, Ops[1]->getType()};
Bob Wilson2605fef2012-08-14 17:27:04 +00006675 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst1,
Jeroen Ketema55a8e802015-09-30 10:56:56 +00006676 Tys), Ops);
Bob Wilson2605fef2012-08-14 17:27:04 +00006677 }
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00006678 LLVM_FALLTHROUGH;
Tim Northoverc322f832014-01-30 14:47:51 +00006679 case NEON::BI__builtin_neon_vst1_lane_v: {
Nate Begeman8ed060b2010-06-11 22:57:12 +00006680 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
6681 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
6682 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
John McCall7f416cc2015-09-08 08:05:57 +00006683 auto St = Builder.CreateStore(Ops[1], Builder.CreateBitCast(PtrOp0, Ty));
Bob Wilson49708d42012-02-04 23:58:08 +00006684 return St;
6685 }
Tim Northoverc322f832014-01-30 14:47:51 +00006686 case NEON::BI__builtin_neon_vtbl1_v:
Nate Begeman55483092010-06-09 01:10:23 +00006687 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl1),
6688 Ops, "vtbl1");
Tim Northoverc322f832014-01-30 14:47:51 +00006689 case NEON::BI__builtin_neon_vtbl2_v:
Nate Begeman55483092010-06-09 01:10:23 +00006690 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl2),
6691 Ops, "vtbl2");
Tim Northoverc322f832014-01-30 14:47:51 +00006692 case NEON::BI__builtin_neon_vtbl3_v:
Nate Begeman55483092010-06-09 01:10:23 +00006693 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl3),
6694 Ops, "vtbl3");
Tim Northoverc322f832014-01-30 14:47:51 +00006695 case NEON::BI__builtin_neon_vtbl4_v:
Nate Begeman55483092010-06-09 01:10:23 +00006696 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl4),
6697 Ops, "vtbl4");
Tim Northoverc322f832014-01-30 14:47:51 +00006698 case NEON::BI__builtin_neon_vtbx1_v:
Nate Begeman55483092010-06-09 01:10:23 +00006699 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx1),
6700 Ops, "vtbx1");
Tim Northoverc322f832014-01-30 14:47:51 +00006701 case NEON::BI__builtin_neon_vtbx2_v:
Nate Begeman55483092010-06-09 01:10:23 +00006702 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx2),
6703 Ops, "vtbx2");
Tim Northoverc322f832014-01-30 14:47:51 +00006704 case NEON::BI__builtin_neon_vtbx3_v:
Nate Begeman55483092010-06-09 01:10:23 +00006705 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx3),
6706 Ops, "vtbx3");
Tim Northoverc322f832014-01-30 14:47:51 +00006707 case NEON::BI__builtin_neon_vtbx4_v:
Nate Begeman55483092010-06-09 01:10:23 +00006708 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx4),
6709 Ops, "vtbx4");
Chris Lattner5cc15e02010-03-03 19:03:45 +00006710 }
6711}
6712
Tim Northover573cbee2014-05-24 12:52:07 +00006713static Value *EmitAArch64TblBuiltinExpr(CodeGenFunction &CGF, unsigned BuiltinID,
Tim Northovera2ee4332014-03-29 15:09:45 +00006714 const CallExpr *E,
Sjoerd Meijer95da8752018-03-13 19:38:56 +00006715 SmallVectorImpl<Value *> &Ops,
6716 llvm::Triple::ArchType Arch) {
Tim Northovera2ee4332014-03-29 15:09:45 +00006717 unsigned int Int = 0;
Craig Topper8a13c412014-05-21 05:09:00 +00006718 const char *s = nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00006719
Tim Northovera2ee4332014-03-29 15:09:45 +00006720 switch (BuiltinID) {
6721 default:
Craig Topper8a13c412014-05-21 05:09:00 +00006722 return nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00006723 case NEON::BI__builtin_neon_vtbl1_v:
6724 case NEON::BI__builtin_neon_vqtbl1_v:
6725 case NEON::BI__builtin_neon_vqtbl1q_v:
6726 case NEON::BI__builtin_neon_vtbl2_v:
6727 case NEON::BI__builtin_neon_vqtbl2_v:
6728 case NEON::BI__builtin_neon_vqtbl2q_v:
6729 case NEON::BI__builtin_neon_vtbl3_v:
6730 case NEON::BI__builtin_neon_vqtbl3_v:
6731 case NEON::BI__builtin_neon_vqtbl3q_v:
6732 case NEON::BI__builtin_neon_vtbl4_v:
6733 case NEON::BI__builtin_neon_vqtbl4_v:
6734 case NEON::BI__builtin_neon_vqtbl4q_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00006735 break;
6736 case NEON::BI__builtin_neon_vtbx1_v:
6737 case NEON::BI__builtin_neon_vqtbx1_v:
6738 case NEON::BI__builtin_neon_vqtbx1q_v:
6739 case NEON::BI__builtin_neon_vtbx2_v:
6740 case NEON::BI__builtin_neon_vqtbx2_v:
6741 case NEON::BI__builtin_neon_vqtbx2q_v:
6742 case NEON::BI__builtin_neon_vtbx3_v:
6743 case NEON::BI__builtin_neon_vqtbx3_v:
6744 case NEON::BI__builtin_neon_vqtbx3q_v:
6745 case NEON::BI__builtin_neon_vtbx4_v:
6746 case NEON::BI__builtin_neon_vqtbx4_v:
6747 case NEON::BI__builtin_neon_vqtbx4q_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00006748 break;
6749 }
6750
6751 assert(E->getNumArgs() >= 3);
6752
6753 // Get the last argument, which specifies the vector type.
6754 llvm::APSInt Result;
6755 const Expr *Arg = E->getArg(E->getNumArgs() - 1);
6756 if (!Arg->isIntegerConstantExpr(Result, CGF.getContext()))
Craig Topper8a13c412014-05-21 05:09:00 +00006757 return nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00006758
6759 // Determine the type of this overloaded NEON intrinsic.
6760 NeonTypeFlags Type(Result.getZExtValue());
Sjoerd Meijer87793e72018-03-19 13:22:49 +00006761 llvm::VectorType *Ty = GetNeonType(&CGF, Type);
Tim Northovera2ee4332014-03-29 15:09:45 +00006762 if (!Ty)
Craig Topper8a13c412014-05-21 05:09:00 +00006763 return nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00006764
Tim Northovera2ee4332014-03-29 15:09:45 +00006765 CodeGen::CGBuilderTy &Builder = CGF.Builder;
6766
6767 // AArch64 scalar builtins are not overloaded; they do not have an extra
6768 // argument that specifies the vector type, so we need to handle each case.
Tim Northovera2ee4332014-03-29 15:09:45 +00006769 switch (BuiltinID) {
6770 case NEON::BI__builtin_neon_vtbl1_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006771 return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 1), nullptr,
6772 Ops[1], Ty, Intrinsic::aarch64_neon_tbl1,
6773 "vtbl1");
Tim Northovera2ee4332014-03-29 15:09:45 +00006774 }
6775 case NEON::BI__builtin_neon_vtbl2_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006776 return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 2), nullptr,
6777 Ops[2], Ty, Intrinsic::aarch64_neon_tbl1,
6778 "vtbl1");
Tim Northovera2ee4332014-03-29 15:09:45 +00006779 }
6780 case NEON::BI__builtin_neon_vtbl3_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006781 return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 3), nullptr,
6782 Ops[3], Ty, Intrinsic::aarch64_neon_tbl2,
6783 "vtbl2");
Tim Northovera2ee4332014-03-29 15:09:45 +00006784 }
6785 case NEON::BI__builtin_neon_vtbl4_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006786 return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(0, 4), nullptr,
6787 Ops[4], Ty, Intrinsic::aarch64_neon_tbl2,
6788 "vtbl2");
Tim Northovera2ee4332014-03-29 15:09:45 +00006789 }
6790 case NEON::BI__builtin_neon_vtbx1_v: {
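    // vtbx1 operates on a single 8-byte table, so it is emulated with tbl1
    // plus a select: byte lanes whose index is >= 8 are out of range for the
    // table and take their value from the destination operand (Ops[0])
    // instead of the lookup result.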
Benjamin Kramerc385a802015-07-28 15:40:11 +00006791 Value *TblRes =
6792 packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 1), nullptr, Ops[2],
6793 Ty, Intrinsic::aarch64_neon_tbl1, "vtbl1");
Tim Northovera2ee4332014-03-29 15:09:45 +00006794
Benjamin Kramerc385a802015-07-28 15:40:11 +00006795 llvm::Constant *EightV = ConstantInt::get(Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00006796 Value *CmpRes = Builder.CreateICmp(ICmpInst::ICMP_UGE, Ops[2], EightV);
6797 CmpRes = Builder.CreateSExt(CmpRes, Ty);
6798
6799 Value *EltsFromInput = Builder.CreateAnd(CmpRes, Ops[0]);
6800 Value *EltsFromTbl = Builder.CreateAnd(Builder.CreateNot(CmpRes), TblRes);
6801 return Builder.CreateOr(EltsFromInput, EltsFromTbl, "vtbx");
6802 }
6803 case NEON::BI__builtin_neon_vtbx2_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006804 return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 2), Ops[0],
6805 Ops[3], Ty, Intrinsic::aarch64_neon_tbx1,
6806 "vtbx1");
Tim Northovera2ee4332014-03-29 15:09:45 +00006807 }
6808 case NEON::BI__builtin_neon_vtbx3_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006809 Value *TblRes =
6810 packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 3), nullptr, Ops[4],
6811 Ty, Intrinsic::aarch64_neon_tbl2, "vtbl2");
Tim Northovera2ee4332014-03-29 15:09:45 +00006812
Benjamin Kramerc385a802015-07-28 15:40:11 +00006813 llvm::Constant *TwentyFourV = ConstantInt::get(Ty, 24);
Tim Northovera2ee4332014-03-29 15:09:45 +00006814 Value *CmpRes = Builder.CreateICmp(ICmpInst::ICMP_UGE, Ops[4],
6815 TwentyFourV);
6816 CmpRes = Builder.CreateSExt(CmpRes, Ty);
6817
6818 Value *EltsFromInput = Builder.CreateAnd(CmpRes, Ops[0]);
6819 Value *EltsFromTbl = Builder.CreateAnd(Builder.CreateNot(CmpRes), TblRes);
6820 return Builder.CreateOr(EltsFromInput, EltsFromTbl, "vtbx");
6821 }
6822 case NEON::BI__builtin_neon_vtbx4_v: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00006823 return packTBLDVectorList(CGF, makeArrayRef(Ops).slice(1, 4), Ops[0],
6824 Ops[5], Ty, Intrinsic::aarch64_neon_tbx2,
6825 "vtbx2");
Tim Northovera2ee4332014-03-29 15:09:45 +00006826 }
6827 case NEON::BI__builtin_neon_vqtbl1_v:
6828 case NEON::BI__builtin_neon_vqtbl1q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006829 Int = Intrinsic::aarch64_neon_tbl1; s = "vtbl1"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006830 case NEON::BI__builtin_neon_vqtbl2_v:
6831 case NEON::BI__builtin_neon_vqtbl2q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00006832 Int = Intrinsic::aarch64_neon_tbl2; s = "vtbl2"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006833 case NEON::BI__builtin_neon_vqtbl3_v:
6834 case NEON::BI__builtin_neon_vqtbl3q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006835 Int = Intrinsic::aarch64_neon_tbl3; s = "vtbl3"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006836 case NEON::BI__builtin_neon_vqtbl4_v:
6837 case NEON::BI__builtin_neon_vqtbl4q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006838 Int = Intrinsic::aarch64_neon_tbl4; s = "vtbl4"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006839 case NEON::BI__builtin_neon_vqtbx1_v:
6840 case NEON::BI__builtin_neon_vqtbx1q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006841 Int = Intrinsic::aarch64_neon_tbx1; s = "vtbx1"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006842 case NEON::BI__builtin_neon_vqtbx2_v:
6843 case NEON::BI__builtin_neon_vqtbx2q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006844 Int = Intrinsic::aarch64_neon_tbx2; s = "vtbx2"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006845 case NEON::BI__builtin_neon_vqtbx3_v:
6846 case NEON::BI__builtin_neon_vqtbx3q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006847 Int = Intrinsic::aarch64_neon_tbx3; s = "vtbx3"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006848 case NEON::BI__builtin_neon_vqtbx4_v:
6849 case NEON::BI__builtin_neon_vqtbx4q_v:
Tim Northover573cbee2014-05-24 12:52:07 +00006850 Int = Intrinsic::aarch64_neon_tbx4; s = "vtbx4"; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00006851 }
6852 }
6853
6854 if (!Int)
Craig Topper8a13c412014-05-21 05:09:00 +00006855 return nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00006856
6857 Function *F = CGF.CGM.getIntrinsic(Int, Ty);
6858 return CGF.EmitNeonCall(F, Ops, s);
6859}
6860
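// Wrap a scalar operand in a <4 x i16> vector: bitcast it to i16 and insert
// it into lane 0 of an undef vector.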
6861Value *CodeGenFunction::vectorWrapScalar16(Value *Op) {
6862 llvm::Type *VTy = llvm::VectorType::get(Int16Ty, 4);
6863 Op = Builder.CreateBitCast(Op, Int16Ty);
6864 Value *V = UndefValue::get(VTy);
Michael J. Spencerdd597752014-05-31 00:22:12 +00006865 llvm::Constant *CI = ConstantInt::get(SizeTy, 0);
Tim Northovera2ee4332014-03-29 15:09:45 +00006866 Op = Builder.CreateInsertElement(V, Op, CI);
6867 return Op;
6868}
6869
Tim Northover573cbee2014-05-24 12:52:07 +00006870Value *CodeGenFunction::EmitAArch64BuiltinExpr(unsigned BuiltinID,
Sjoerd Meijer95da8752018-03-13 19:38:56 +00006871 const CallExpr *E,
6872 llvm::Triple::ArchType Arch) {
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006873 unsigned HintID = static_cast<unsigned>(-1);
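  // These builtins map onto the immediate of the AArch64 HINT instruction
  // (nop=0, yield=1, wfe=2, wfi=3, sev=4, sevl=5), emitted via the
  // aarch64_hint intrinsic below.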
6874 switch (BuiltinID) {
6875 default: break;
Yi Kong4d5e23f2014-07-14 15:20:09 +00006876 case AArch64::BI__builtin_arm_nop:
6877 HintID = 0;
6878 break;
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006879 case AArch64::BI__builtin_arm_yield:
Mandeep Singh Grang2d2838302018-06-13 18:49:35 +00006880 case AArch64::BI__yield:
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006881 HintID = 1;
6882 break;
6883 case AArch64::BI__builtin_arm_wfe:
Mandeep Singh Grang2d2838302018-06-13 18:49:35 +00006884 case AArch64::BI__wfe:
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006885 HintID = 2;
6886 break;
6887 case AArch64::BI__builtin_arm_wfi:
Mandeep Singh Grang2d2838302018-06-13 18:49:35 +00006888 case AArch64::BI__wfi:
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006889 HintID = 3;
6890 break;
6891 case AArch64::BI__builtin_arm_sev:
Mandeep Singh Grang2d2838302018-06-13 18:49:35 +00006892 case AArch64::BI__sev:
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006893 HintID = 4;
6894 break;
6895 case AArch64::BI__builtin_arm_sevl:
Mandeep Singh Grang2d2838302018-06-13 18:49:35 +00006896 case AArch64::BI__sevl:
Saleem Abdulrasool572250d2014-07-12 23:27:22 +00006897 HintID = 5;
6898 break;
6899 }
6900
6901 if (HintID != static_cast<unsigned>(-1)) {
6902 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_hint);
6903 return Builder.CreateCall(F, llvm::ConstantInt::get(Int32Ty, HintID));
6904 }
6905
Yi Konga5548432014-08-13 19:18:20 +00006906 if (BuiltinID == AArch64::BI__builtin_arm_prefetch) {
6907 Value *Address = EmitScalarExpr(E->getArg(0));
6908 Value *RW = EmitScalarExpr(E->getArg(1));
6909 Value *CacheLevel = EmitScalarExpr(E->getArg(2));
6910 Value *RetentionPolicy = EmitScalarExpr(E->getArg(3));
6911 Value *IsData = EmitScalarExpr(E->getArg(4));
6912
6913 Value *Locality = nullptr;
6914 if (cast<llvm::ConstantInt>(RetentionPolicy)->isZero()) {
6915 // Temporal fetch, needs to convert cache level to locality.
6916 Locality = llvm::ConstantInt::get(Int32Ty,
6917 -cast<llvm::ConstantInt>(CacheLevel)->getValue() + 3);
6918 } else {
6919 // Streaming fetch.
6920 Locality = llvm::ConstantInt::get(Int32Ty, 0);
6921 }
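// For a temporal fetch the mapping above is Locality = 3 - CacheLevel, so
// cache level 0 gets the highest LLVM locality value of 3; streaming fetches
// always use locality 0.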
6922
6923 // FIXME: We need an AArch64-specific LLVM intrinsic if we want to specify
6924 // PLDL3STRM or PLDL2STRM.
James Y Knight8799cae2019-02-03 21:53:49 +00006925 Function *F = CGM.getIntrinsic(Intrinsic::prefetch);
David Blaikie43f9bb72015-05-18 22:14:03 +00006926 return Builder.CreateCall(F, {Address, RW, Locality, IsData});
Yi Konga5548432014-08-13 19:18:20 +00006927 }
6928
Jim Grosbach79140822014-06-16 21:56:02 +00006929 if (BuiltinID == AArch64::BI__builtin_arm_rbit) {
6930 assert((getContext().getTypeSize(E->getType()) == 32) &&
6931 "rbit of unusual size!");
6932 llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
6933 return Builder.CreateCall(
Chad Rosier5a4a1be2017-01-10 17:20:28 +00006934 CGM.getIntrinsic(Intrinsic::bitreverse, Arg->getType()), Arg, "rbit");
Jim Grosbach79140822014-06-16 21:56:02 +00006935 }
6936 if (BuiltinID == AArch64::BI__builtin_arm_rbit64) {
6937 assert((getContext().getTypeSize(E->getType()) == 64) &&
6938 "rbit of unusual size!");
6939 llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
6940 return Builder.CreateCall(
Chad Rosier5a4a1be2017-01-10 17:20:28 +00006941 CGM.getIntrinsic(Intrinsic::bitreverse, Arg->getType()), Arg, "rbit");
Jim Grosbach79140822014-06-16 21:56:02 +00006942 }
6943
Tim Northover573cbee2014-05-24 12:52:07 +00006944 if (BuiltinID == AArch64::BI__clear_cache) {
Tim Northovera2ee4332014-03-29 15:09:45 +00006945 assert(E->getNumArgs() == 2 && "__clear_cache takes 2 arguments");
6946 const FunctionDecl *FD = E->getDirectCallee();
Benjamin Kramerc385a802015-07-28 15:40:11 +00006947 Value *Ops[2];
Tim Northovera2ee4332014-03-29 15:09:45 +00006948 for (unsigned i = 0; i < 2; i++)
Benjamin Kramerc385a802015-07-28 15:40:11 +00006949 Ops[i] = EmitScalarExpr(E->getArg(i));
Tim Northovera2ee4332014-03-29 15:09:45 +00006950 llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
6951 llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
6952 StringRef Name = FD->getName();
6953 return EmitNounwindRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
6954 }
6955
Tim Northover3acd6bd2014-07-02 12:56:02 +00006956 if ((BuiltinID == AArch64::BI__builtin_arm_ldrex ||
6957 BuiltinID == AArch64::BI__builtin_arm_ldaex) &&
Tim Northovera2ee4332014-03-29 15:09:45 +00006958 getContext().getTypeSize(E->getType()) == 128) {
Tim Northover3acd6bd2014-07-02 12:56:02 +00006959 Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_ldaex
6960 ? Intrinsic::aarch64_ldaxp
6961 : Intrinsic::aarch64_ldxp);
Tim Northovera2ee4332014-03-29 15:09:45 +00006962
6963 Value *LdPtr = EmitScalarExpr(E->getArg(0));
6964 Value *Val = Builder.CreateCall(F, Builder.CreateBitCast(LdPtr, Int8PtrTy),
6965 "ldxp");
6966
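// Reassemble the pair returned by ldxp/ldaxp: both halves are zero-extended
// to i128 and the second struct element is shifted into the high 64 bits.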
6967 Value *Val0 = Builder.CreateExtractValue(Val, 1);
6968 Value *Val1 = Builder.CreateExtractValue(Val, 0);
6969 llvm::Type *Int128Ty = llvm::IntegerType::get(getLLVMContext(), 128);
6970 Val0 = Builder.CreateZExt(Val0, Int128Ty);
6971 Val1 = Builder.CreateZExt(Val1, Int128Ty);
6972
6973 Value *ShiftCst = llvm::ConstantInt::get(Int128Ty, 64);
6974 Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
6975 Val = Builder.CreateOr(Val, Val1);
6976 return Builder.CreateBitCast(Val, ConvertType(E->getType()));
Tim Northover3acd6bd2014-07-02 12:56:02 +00006977 } else if (BuiltinID == AArch64::BI__builtin_arm_ldrex ||
6978 BuiltinID == AArch64::BI__builtin_arm_ldaex) {
Tim Northovera2ee4332014-03-29 15:09:45 +00006979 Value *LoadAddr = EmitScalarExpr(E->getArg(0));
6980
6981 QualType Ty = E->getType();
6982 llvm::Type *RealResTy = ConvertType(Ty);
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006983 llvm::Type *PtrTy = llvm::IntegerType::get(
6984 getLLVMContext(), getContext().getTypeSize(Ty))->getPointerTo();
6985 LoadAddr = Builder.CreateBitCast(LoadAddr, PtrTy);
Tim Northovera2ee4332014-03-29 15:09:45 +00006986
Tim Northover3acd6bd2014-07-02 12:56:02 +00006987 Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_ldaex
6988 ? Intrinsic::aarch64_ldaxr
6989 : Intrinsic::aarch64_ldxr,
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006990 PtrTy);
Tim Northovera2ee4332014-03-29 15:09:45 +00006991 Value *Val = Builder.CreateCall(F, LoadAddr, "ldxr");
6992
6993 if (RealResTy->isPointerTy())
6994 return Builder.CreateIntToPtr(Val, RealResTy);
6995
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00006996 llvm::Type *IntResTy = llvm::IntegerType::get(
6997 getLLVMContext(), CGM.getDataLayout().getTypeSizeInBits(RealResTy));
Tim Northovera2ee4332014-03-29 15:09:45 +00006998 Val = Builder.CreateTruncOrBitCast(Val, IntResTy);
6999 return Builder.CreateBitCast(Val, RealResTy);
7000 }
7001
Tim Northover3acd6bd2014-07-02 12:56:02 +00007002 if ((BuiltinID == AArch64::BI__builtin_arm_strex ||
7003 BuiltinID == AArch64::BI__builtin_arm_stlex) &&
Tim Northovera2ee4332014-03-29 15:09:45 +00007004 getContext().getTypeSize(E->getArg(0)->getType()) == 128) {
Tim Northover3acd6bd2014-07-02 12:56:02 +00007005 Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_stlex
7006 ? Intrinsic::aarch64_stlxp
7007 : Intrinsic::aarch64_stxp);
Serge Guelton1d993272017-05-09 19:31:30 +00007008 llvm::Type *STy = llvm::StructType::get(Int64Ty, Int64Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00007009
John McCall7f416cc2015-09-08 08:05:57 +00007010 Address Tmp = CreateMemTemp(E->getArg(0)->getType());
7011 EmitAnyExprToMem(E->getArg(0), Tmp, Qualifiers(), /*init*/ true);
Tim Northovera2ee4332014-03-29 15:09:45 +00007012
John McCall7f416cc2015-09-08 08:05:57 +00007013 Tmp = Builder.CreateBitCast(Tmp, llvm::PointerType::getUnqual(STy));
7014 llvm::Value *Val = Builder.CreateLoad(Tmp);
Tim Northovera2ee4332014-03-29 15:09:45 +00007015
7016 Value *Arg0 = Builder.CreateExtractValue(Val, 0);
7017 Value *Arg1 = Builder.CreateExtractValue(Val, 1);
7018 Value *StPtr = Builder.CreateBitCast(EmitScalarExpr(E->getArg(1)),
7019 Int8PtrTy);
David Blaikie43f9bb72015-05-18 22:14:03 +00007020 return Builder.CreateCall(F, {Arg0, Arg1, StPtr}, "stxp");
7021 }
7022
7023 if (BuiltinID == AArch64::BI__builtin_arm_strex ||
7024 BuiltinID == AArch64::BI__builtin_arm_stlex) {
Tim Northovera2ee4332014-03-29 15:09:45 +00007025 Value *StoreVal = EmitScalarExpr(E->getArg(0));
7026 Value *StoreAddr = EmitScalarExpr(E->getArg(1));
7027
7028 QualType Ty = E->getArg(0)->getType();
7029 llvm::Type *StoreTy = llvm::IntegerType::get(getLLVMContext(),
7030 getContext().getTypeSize(Ty));
7031 StoreAddr = Builder.CreateBitCast(StoreAddr, StoreTy->getPointerTo());
7032
7033 if (StoreVal->getType()->isPointerTy())
7034 StoreVal = Builder.CreatePtrToInt(StoreVal, Int64Ty);
7035 else {
Akira Hatanaka6c299ca2016-12-01 19:25:14 +00007036 llvm::Type *IntTy = llvm::IntegerType::get(
7037 getLLVMContext(),
7038 CGM.getDataLayout().getTypeSizeInBits(StoreVal->getType()));
7039 StoreVal = Builder.CreateBitCast(StoreVal, IntTy);
Tim Northovera2ee4332014-03-29 15:09:45 +00007040 StoreVal = Builder.CreateZExtOrBitCast(StoreVal, Int64Ty);
7041 }
7042
Tim Northover3acd6bd2014-07-02 12:56:02 +00007043 Function *F = CGM.getIntrinsic(BuiltinID == AArch64::BI__builtin_arm_stlex
7044 ? Intrinsic::aarch64_stlxr
7045 : Intrinsic::aarch64_stxr,
7046 StoreAddr->getType());
David Blaikie43f9bb72015-05-18 22:14:03 +00007047 return Builder.CreateCall(F, {StoreVal, StoreAddr}, "stxr");
Tim Northovera2ee4332014-03-29 15:09:45 +00007048 }
7049
Mandeep Singh Grangecc82ef2018-10-04 22:32:42 +00007050 if (BuiltinID == AArch64::BI__getReg) {
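    // Read a general-purpose register by number: 31 is mapped to "sp" and any
    // other value to "xN", and the name is passed to llvm.read_register via a
    // named-register metadata node.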
Fangrui Song407659a2018-11-30 23:41:18 +00007051 Expr::EvalResult Result;
7052 if (!E->getArg(0)->EvaluateAsInt(Result, CGM.getContext()))
Mandeep Singh Grangecc82ef2018-10-04 22:32:42 +00007053 llvm_unreachable("Sema will ensure that the parameter is constant");
7054
Fangrui Song407659a2018-11-30 23:41:18 +00007055 llvm::APSInt Value = Result.Val.getInt();
Mandeep Singh Grangecc82ef2018-10-04 22:32:42 +00007056 LLVMContext &Context = CGM.getLLVMContext();
7057 std::string Reg = Value == 31 ? "sp" : "x" + Value.toString(10);
7058
7059 llvm::Metadata *Ops[] = {llvm::MDString::get(Context, Reg)};
7060 llvm::MDNode *RegName = llvm::MDNode::get(Context, Ops);
7061 llvm::Value *Metadata = llvm::MetadataAsValue::get(Context, RegName);
7062
James Y Knight8799cae2019-02-03 21:53:49 +00007063 llvm::Function *F =
Mandeep Singh Grangecc82ef2018-10-04 22:32:42 +00007064 CGM.getIntrinsic(llvm::Intrinsic::read_register, {Int64Ty});
7065 return Builder.CreateCall(F, Metadata);
7066 }
7067
Tim Northover573cbee2014-05-24 12:52:07 +00007068 if (BuiltinID == AArch64::BI__builtin_arm_clrex) {
7069 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_clrex);
David Blaikie4ba525b2015-07-14 17:27:39 +00007070 return Builder.CreateCall(F);
Tim Northovera2ee4332014-03-29 15:09:45 +00007071 }
7072
Mandeep Singh Grangaef87982018-10-03 17:24:21 +00007073 if (BuiltinID == AArch64::BI_ReadWriteBarrier)
7074 return Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent,
7075 llvm::SyncScope::SingleThread);
7076
Tim Northovera2ee4332014-03-29 15:09:45 +00007077 // CRC32
7078 Intrinsic::ID CRCIntrinsicID = Intrinsic::not_intrinsic;
7079 switch (BuiltinID) {
Tim Northover573cbee2014-05-24 12:52:07 +00007080 case AArch64::BI__builtin_arm_crc32b:
7081 CRCIntrinsicID = Intrinsic::aarch64_crc32b; break;
7082 case AArch64::BI__builtin_arm_crc32cb:
7083 CRCIntrinsicID = Intrinsic::aarch64_crc32cb; break;
7084 case AArch64::BI__builtin_arm_crc32h:
7085 CRCIntrinsicID = Intrinsic::aarch64_crc32h; break;
7086 case AArch64::BI__builtin_arm_crc32ch:
7087 CRCIntrinsicID = Intrinsic::aarch64_crc32ch; break;
7088 case AArch64::BI__builtin_arm_crc32w:
7089 CRCIntrinsicID = Intrinsic::aarch64_crc32w; break;
7090 case AArch64::BI__builtin_arm_crc32cw:
7091 CRCIntrinsicID = Intrinsic::aarch64_crc32cw; break;
7092 case AArch64::BI__builtin_arm_crc32d:
7093 CRCIntrinsicID = Intrinsic::aarch64_crc32x; break;
7094 case AArch64::BI__builtin_arm_crc32cd:
7095 CRCIntrinsicID = Intrinsic::aarch64_crc32cx; break;
Tim Northovera2ee4332014-03-29 15:09:45 +00007096 }
7097
7098 if (CRCIntrinsicID != Intrinsic::not_intrinsic) {
7099 Value *Arg0 = EmitScalarExpr(E->getArg(0));
7100 Value *Arg1 = EmitScalarExpr(E->getArg(1));
7101 Function *F = CGM.getIntrinsic(CRCIntrinsicID);
7102
7103 llvm::Type *DataTy = F->getFunctionType()->getParamType(1);
7104 Arg1 = Builder.CreateZExtOrBitCast(Arg1, DataTy);
7105
David Blaikie43f9bb72015-05-18 22:14:03 +00007106 return Builder.CreateCall(F, {Arg0, Arg1});
Tim Northovera2ee4332014-03-29 15:09:45 +00007107 }
7108
Javed Absar18b0c402019-04-26 21:08:11 +00007109 // Memory Tagging Extensions (MTE) Intrinsics
7110 Intrinsic::ID MTEIntrinsicID = Intrinsic::not_intrinsic;
7111 switch (BuiltinID) {
7112 case AArch64::BI__builtin_arm_irg:
7113 MTEIntrinsicID = Intrinsic::aarch64_irg; break;
7114 case AArch64::BI__builtin_arm_addg:
7115 MTEIntrinsicID = Intrinsic::aarch64_addg; break;
7116 case AArch64::BI__builtin_arm_gmi:
7117 MTEIntrinsicID = Intrinsic::aarch64_gmi; break;
7118 case AArch64::BI__builtin_arm_ldg:
7119 MTEIntrinsicID = Intrinsic::aarch64_ldg; break;
7120 case AArch64::BI__builtin_arm_stg:
7121 MTEIntrinsicID = Intrinsic::aarch64_stg; break;
7122 case AArch64::BI__builtin_arm_subp:
7123 MTEIntrinsicID = Intrinsic::aarch64_subp; break;
7124 }
7125
7126 if (MTEIntrinsicID != Intrinsic::not_intrinsic) {
7127 llvm::Type *T = ConvertType(E->getType());
7128
7129 if (MTEIntrinsicID == Intrinsic::aarch64_irg) {
7130 Value *Pointer = EmitScalarExpr(E->getArg(0));
7131 Value *Mask = EmitScalarExpr(E->getArg(1));
7132
7133 Pointer = Builder.CreatePointerCast(Pointer, Int8PtrTy);
7134 Mask = Builder.CreateZExt(Mask, Int64Ty);
7135 Value *RV = Builder.CreateCall(
7136 CGM.getIntrinsic(MTEIntrinsicID), {Pointer, Mask});
7137 return Builder.CreatePointerCast(RV, T);
7138 }
7139 if (MTEIntrinsicID == Intrinsic::aarch64_addg) {
7140 Value *Pointer = EmitScalarExpr(E->getArg(0));
7141 Value *TagOffset = EmitScalarExpr(E->getArg(1));
7142
7143 Pointer = Builder.CreatePointerCast(Pointer, Int8PtrTy);
7144 TagOffset = Builder.CreateZExt(TagOffset, Int64Ty);
7145 Value *RV = Builder.CreateCall(
7146 CGM.getIntrinsic(MTEIntrinsicID), {Pointer, TagOffset});
7147 return Builder.CreatePointerCast(RV, T);
7148 }
7149 if (MTEIntrinsicID == Intrinsic::aarch64_gmi) {
7150 Value *Pointer = EmitScalarExpr(E->getArg(0));
7151 Value *ExcludedMask = EmitScalarExpr(E->getArg(1));
7152
7153 ExcludedMask = Builder.CreateZExt(ExcludedMask, Int64Ty);
7154 Pointer = Builder.CreatePointerCast(Pointer, Int8PtrTy);
7155 return Builder.CreateCall(
7156 CGM.getIntrinsic(MTEIntrinsicID), {Pointer, ExcludedMask});
7157 }
7158    // Although it is possible to supply a different return
7159    // address (first arg) to this intrinsic, for now we set
7160    // the return address to be the same as the input address.
7161 if (MTEIntrinsicID == Intrinsic::aarch64_ldg) {
7162 Value *TagAddress = EmitScalarExpr(E->getArg(0));
7163 TagAddress = Builder.CreatePointerCast(TagAddress, Int8PtrTy);
7164 Value *RV = Builder.CreateCall(
7165 CGM.getIntrinsic(MTEIntrinsicID), {TagAddress, TagAddress});
7166 return Builder.CreatePointerCast(RV, T);
7167 }
7168    // Although it is possible to supply a different tag (to set)
7169    // to this intrinsic (as the first arg), for now we supply
7170    // the tag that is in the input address arg (the common use case).
7171 if (MTEIntrinsicID == Intrinsic::aarch64_stg) {
7172 Value *TagAddress = EmitScalarExpr(E->getArg(0));
7173 TagAddress = Builder.CreatePointerCast(TagAddress, Int8PtrTy);
7174 return Builder.CreateCall(
7175 CGM.getIntrinsic(MTEIntrinsicID), {TagAddress, TagAddress});
7176 }
7177 if (MTEIntrinsicID == Intrinsic::aarch64_subp) {
7178 Value *PointerA = EmitScalarExpr(E->getArg(0));
7179 Value *PointerB = EmitScalarExpr(E->getArg(1));
7180 PointerA = Builder.CreatePointerCast(PointerA, Int8PtrTy);
7181 PointerB = Builder.CreatePointerCast(PointerB, Int8PtrTy);
7182 return Builder.CreateCall(
7183 CGM.getIntrinsic(MTEIntrinsicID), {PointerA, PointerB});
7184 }
7185 }
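  // Usage note (editorial, not part of the original source): for example,
  // __builtin_arm_irg(p, mask) lowers to llvm.aarch64.irg on an i8* copy of p
  // and casts the tagged result back to the pointer type, while the ldg/stg
  // builtins pass the same pointer for both intrinsic operands, as the
  // comments above explain.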
7186
Luke Cheeseman59b2d832015-06-15 17:51:01 +00007187 if (BuiltinID == AArch64::BI__builtin_arm_rsr ||
7188 BuiltinID == AArch64::BI__builtin_arm_rsr64 ||
7189 BuiltinID == AArch64::BI__builtin_arm_rsrp ||
7190 BuiltinID == AArch64::BI__builtin_arm_wsr ||
7191 BuiltinID == AArch64::BI__builtin_arm_wsr64 ||
7192 BuiltinID == AArch64::BI__builtin_arm_wsrp) {
7193
7194 bool IsRead = BuiltinID == AArch64::BI__builtin_arm_rsr ||
7195 BuiltinID == AArch64::BI__builtin_arm_rsr64 ||
7196 BuiltinID == AArch64::BI__builtin_arm_rsrp;
7197
7198 bool IsPointerBuiltin = BuiltinID == AArch64::BI__builtin_arm_rsrp ||
7199 BuiltinID == AArch64::BI__builtin_arm_wsrp;
7200
7201 bool Is64Bit = BuiltinID != AArch64::BI__builtin_arm_rsr &&
7202 BuiltinID != AArch64::BI__builtin_arm_wsr;
7203
7204 llvm::Type *ValueType;
7205 llvm::Type *RegisterType = Int64Ty;
7206 if (IsPointerBuiltin) {
7207 ValueType = VoidPtrTy;
7208 } else if (Is64Bit) {
7209 ValueType = Int64Ty;
7210 } else {
7211 ValueType = Int32Ty;
7212 }
7213
7214 return EmitSpecialRegisterBuiltin(*this, E, RegisterType, ValueType, IsRead);
7215 }
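  // Illustrative note (editorial, not part of the original source): the name
  // string, e.g. __builtin_arm_rsr64("tpidr_el0"), becomes the register-name
  // metadata; the 32-bit rsr/wsr forms keep an i64 register type and pass
  // Int32Ty as the value type, with the width adjustment left to
  // EmitSpecialRegisterBuiltin.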
7216
Mandeep Singh Grang2147b1a2018-10-18 23:35:35 +00007217 if (BuiltinID == AArch64::BI_ReadStatusReg ||
7218 BuiltinID == AArch64::BI_WriteStatusReg) {
7219 LLVMContext &Context = CGM.getLLVMContext();
7220
7221 unsigned SysReg =
7222 E->getArg(0)->EvaluateKnownConstInt(getContext()).getZExtValue();
7223
7224 std::string SysRegStr;
7225 llvm::raw_string_ostream(SysRegStr) <<
7226 ((1 << 1) | ((SysReg >> 14) & 1)) << ":" <<
7227 ((SysReg >> 11) & 7) << ":" <<
7228 ((SysReg >> 7) & 15) << ":" <<
7229 ((SysReg >> 3) & 15) << ":" <<
7230 ( SysReg & 7);
7231
7232 llvm::Metadata *Ops[] = { llvm::MDString::get(Context, SysRegStr) };
7233 llvm::MDNode *RegName = llvm::MDNode::get(Context, Ops);
7234 llvm::Value *Metadata = llvm::MetadataAsValue::get(Context, RegName);
7235
7236 llvm::Type *RegisterType = Int64Ty;
Mandeep Singh Grang2147b1a2018-10-18 23:35:35 +00007237 llvm::Type *Types[] = { RegisterType };
7238
7239 if (BuiltinID == AArch64::BI_ReadStatusReg) {
James Y Knight8799cae2019-02-03 21:53:49 +00007240 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::read_register, Types);
Mandeep Singh Grang2147b1a2018-10-18 23:35:35 +00007241
Eli Friedman3189d5f2019-02-08 01:17:49 +00007242 return Builder.CreateCall(F, Metadata);
Mandeep Singh Grang2147b1a2018-10-18 23:35:35 +00007243 }
7244
James Y Knight8799cae2019-02-03 21:53:49 +00007245 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::write_register, Types);
Mandeep Singh Grang2147b1a2018-10-18 23:35:35 +00007246 llvm::Value *ArgValue = EmitScalarExpr(E->getArg(1));
Mandeep Singh Grang2147b1a2018-10-18 23:35:35 +00007247
7248 return Builder.CreateCall(F, { Metadata, ArgValue });
7249 }
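  // Worked example (editorial assumption, not part of the original source):
  // for TPIDR_EL0 (op0=3, op1=3, CRn=13, CRm=0, op2=2) the packed immediate is
  // (1<<14)|(3<<11)|(13<<7)|(0<<3)|2 = 0x5E82, which the code above decodes
  // back into the metadata string "3:3:13:0:2".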
7250
Mandeep Singh Grangbe0e78e2018-11-01 01:35:34 +00007251 if (BuiltinID == AArch64::BI_AddressOfReturnAddress) {
James Y Knight8799cae2019-02-03 21:53:49 +00007252 llvm::Function *F = CGM.getIntrinsic(Intrinsic::addressofreturnaddress);
Mandeep Singh Grangbe0e78e2018-11-01 01:35:34 +00007253 return Builder.CreateCall(F);
7254 }
7255
Martin Storsjo7037a132019-05-06 21:19:07 +00007256 if (BuiltinID == AArch64::BI__builtin_sponentry) {
7257 llvm::Function *F = CGM.getIntrinsic(Intrinsic::sponentry);
7258 return Builder.CreateCall(F);
7259 }
7260
Ahmed Bougacha94df7302015-06-04 01:43:41 +00007261 // Find out if any arguments are required to be integer constant
7262 // expressions.
7263 unsigned ICEArguments = 0;
7264 ASTContext::GetBuiltinTypeError Error;
7265 getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
7266 assert(Error == ASTContext::GE_None && "Should not codegen an error");
7267
Tim Northovera2ee4332014-03-29 15:09:45 +00007268 llvm::SmallVector<Value*, 4> Ops;
Ahmed Bougacha94df7302015-06-04 01:43:41 +00007269 for (unsigned i = 0, e = E->getNumArgs() - 1; i != e; i++) {
7270 if ((ICEArguments & (1 << i)) == 0) {
7271 Ops.push_back(EmitScalarExpr(E->getArg(i)));
7272 } else {
7273 // If this is required to be a constant, constant fold it so that we know
7274 // that the generated intrinsic gets a ConstantInt.
7275 llvm::APSInt Result;
7276 bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
7277 assert(IsConst && "Constant arg isn't actually constant?");
7278 (void)IsConst;
7279 Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
7280 }
7281 }
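  // Note (editorial sketch, not part of the original source): immediate-style
  // operands such as lane indices and _n shift amounts have their bit set in
  // ICEArguments, so the loop above folds them to ConstantInt values that the
  // later cast<ConstantInt>(Ops[...]) uses in the lane/shift cases rely on.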
Tim Northovera2ee4332014-03-29 15:09:45 +00007282
Craig Topper5fc8fc22014-08-27 06:28:36 +00007283 auto SISDMap = makeArrayRef(AArch64SISDIntrinsicMap);
Tim Northovera2ee4332014-03-29 15:09:45 +00007284 const NeonIntrinsicInfo *Builtin = findNeonIntrinsicInMap(
Tim Northover573cbee2014-05-24 12:52:07 +00007285 SISDMap, BuiltinID, AArch64SISDIntrinsicsProvenSorted);
Tim Northovera2ee4332014-03-29 15:09:45 +00007286
7287 if (Builtin) {
7288 Ops.push_back(EmitScalarExpr(E->getArg(E->getNumArgs() - 1)));
7289 Value *Result = EmitCommonNeonSISDBuiltinExpr(*this, *Builtin, Ops, E);
7290 assert(Result && "SISD intrinsic should have been handled");
7291 return Result;
7292 }
7293
7294 llvm::APSInt Result;
7295 const Expr *Arg = E->getArg(E->getNumArgs()-1);
7296 NeonTypeFlags Type(0);
7297 if (Arg->isIntegerConstantExpr(Result, getContext()))
7298 // Determine the type of this overloaded NEON intrinsic.
7299 Type = NeonTypeFlags(Result.getZExtValue());
7300
7301 bool usgn = Type.isUnsigned();
7302 bool quad = Type.isQuad();
7303
7304 // Handle non-overloaded intrinsics first.
7305 switch (BuiltinID) {
7306 default: break;
Abderrazek Zaafranie7ed8802018-02-12 21:26:06 +00007307 case NEON::BI__builtin_neon_vabsh_f16:
7308 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7309 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::fabs, HalfTy), Ops, "vabs");
Tim Northoverb17f9a42014-04-01 12:23:08 +00007310 case NEON::BI__builtin_neon_vldrq_p128: {
Peter Collingbourneb367c562016-11-28 22:30:21 +00007311 llvm::Type *Int128Ty = llvm::Type::getIntNTy(getLLVMContext(), 128);
7312 llvm::Type *Int128PTy = llvm::PointerType::get(Int128Ty, 0);
Tim Northoverb17f9a42014-04-01 12:23:08 +00007313 Value *Ptr = Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)), Int128PTy);
Peter Collingbourneb367c562016-11-28 22:30:21 +00007314 return Builder.CreateAlignedLoad(Int128Ty, Ptr,
7315 CharUnits::fromQuantity(16));
Tim Northoverb17f9a42014-04-01 12:23:08 +00007316 }
7317 case NEON::BI__builtin_neon_vstrq_p128: {
7318 llvm::Type *Int128PTy = llvm::Type::getIntNPtrTy(getLLVMContext(), 128);
7319 Value *Ptr = Builder.CreateBitCast(Ops[0], Int128PTy);
John McCall7f416cc2015-09-08 08:05:57 +00007320 return Builder.CreateDefaultAlignedStore(EmitScalarExpr(E->getArg(1)), Ptr);
Tim Northoverb17f9a42014-04-01 12:23:08 +00007321 }
Tim Northovera2ee4332014-03-29 15:09:45 +00007322 case NEON::BI__builtin_neon_vcvts_u32_f32:
7323 case NEON::BI__builtin_neon_vcvtd_u64_f64:
7324 usgn = true;
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00007325 LLVM_FALLTHROUGH;
Tim Northovera2ee4332014-03-29 15:09:45 +00007326 case NEON::BI__builtin_neon_vcvts_s32_f32:
7327 case NEON::BI__builtin_neon_vcvtd_s64_f64: {
7328 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7329 bool Is64 = Ops[0]->getType()->getPrimitiveSizeInBits() == 64;
7330 llvm::Type *InTy = Is64 ? Int64Ty : Int32Ty;
7331 llvm::Type *FTy = Is64 ? DoubleTy : FloatTy;
7332 Ops[0] = Builder.CreateBitCast(Ops[0], FTy);
7333 if (usgn)
7334 return Builder.CreateFPToUI(Ops[0], InTy);
7335 return Builder.CreateFPToSI(Ops[0], InTy);
7336 }
7337 case NEON::BI__builtin_neon_vcvts_f32_u32:
7338 case NEON::BI__builtin_neon_vcvtd_f64_u64:
7339 usgn = true;
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00007340 LLVM_FALLTHROUGH;
Tim Northovera2ee4332014-03-29 15:09:45 +00007341 case NEON::BI__builtin_neon_vcvts_f32_s32:
7342 case NEON::BI__builtin_neon_vcvtd_f64_s64: {
7343 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7344 bool Is64 = Ops[0]->getType()->getPrimitiveSizeInBits() == 64;
7345 llvm::Type *InTy = Is64 ? Int64Ty : Int32Ty;
7346 llvm::Type *FTy = Is64 ? DoubleTy : FloatTy;
7347 Ops[0] = Builder.CreateBitCast(Ops[0], InTy);
7348 if (usgn)
7349 return Builder.CreateUIToFP(Ops[0], FTy);
7350 return Builder.CreateSIToFP(Ops[0], FTy);
7351 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007352 case NEON::BI__builtin_neon_vcvth_f16_u16:
7353 case NEON::BI__builtin_neon_vcvth_f16_u32:
7354 case NEON::BI__builtin_neon_vcvth_f16_u64:
7355 usgn = true;
Reid Kleckner4dc0b1a2018-11-01 19:54:45 +00007356 LLVM_FALLTHROUGH;
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007357 case NEON::BI__builtin_neon_vcvth_f16_s16:
7358 case NEON::BI__builtin_neon_vcvth_f16_s32:
7359 case NEON::BI__builtin_neon_vcvth_f16_s64: {
7360 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7361 llvm::Type *FTy = HalfTy;
7362 llvm::Type *InTy;
7363 if (Ops[0]->getType()->getPrimitiveSizeInBits() == 64)
7364 InTy = Int64Ty;
7365 else if (Ops[0]->getType()->getPrimitiveSizeInBits() == 32)
7366 InTy = Int32Ty;
7367 else
7368 InTy = Int16Ty;
7369 Ops[0] = Builder.CreateBitCast(Ops[0], InTy);
7370 if (usgn)
7371 return Builder.CreateUIToFP(Ops[0], FTy);
7372 return Builder.CreateSIToFP(Ops[0], FTy);
7373 }
7374 case NEON::BI__builtin_neon_vcvth_u16_f16:
7375 usgn = true;
Reid Kleckner4dc0b1a2018-11-01 19:54:45 +00007376 LLVM_FALLTHROUGH;
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007377 case NEON::BI__builtin_neon_vcvth_s16_f16: {
7378 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7379 Ops[0] = Builder.CreateBitCast(Ops[0], HalfTy);
7380 if (usgn)
7381 return Builder.CreateFPToUI(Ops[0], Int16Ty);
7382 return Builder.CreateFPToSI(Ops[0], Int16Ty);
7383 }
7384 case NEON::BI__builtin_neon_vcvth_u32_f16:
7385 usgn = true;
Reid Kleckner4dc0b1a2018-11-01 19:54:45 +00007386 LLVM_FALLTHROUGH;
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007387 case NEON::BI__builtin_neon_vcvth_s32_f16: {
7388 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7389 Ops[0] = Builder.CreateBitCast(Ops[0], HalfTy);
7390 if (usgn)
7391 return Builder.CreateFPToUI(Ops[0], Int32Ty);
7392 return Builder.CreateFPToSI(Ops[0], Int32Ty);
7393 }
7394 case NEON::BI__builtin_neon_vcvth_u64_f16:
7395 usgn = true;
Reid Kleckner4dc0b1a2018-11-01 19:54:45 +00007396 LLVM_FALLTHROUGH;
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007397 case NEON::BI__builtin_neon_vcvth_s64_f16: {
7398 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7399 Ops[0] = Builder.CreateBitCast(Ops[0], HalfTy);
7400 if (usgn)
7401 return Builder.CreateFPToUI(Ops[0], Int64Ty);
7402 return Builder.CreateFPToSI(Ops[0], Int64Ty);
7403 }
Abderrazek Zaafranie7ed8802018-02-12 21:26:06 +00007404 case NEON::BI__builtin_neon_vcvtah_u16_f16:
7405 case NEON::BI__builtin_neon_vcvtmh_u16_f16:
7406 case NEON::BI__builtin_neon_vcvtnh_u16_f16:
7407 case NEON::BI__builtin_neon_vcvtph_u16_f16:
7408 case NEON::BI__builtin_neon_vcvtah_s16_f16:
7409 case NEON::BI__builtin_neon_vcvtmh_s16_f16:
7410 case NEON::BI__builtin_neon_vcvtnh_s16_f16:
7411 case NEON::BI__builtin_neon_vcvtph_s16_f16: {
7412 unsigned Int;
7413 llvm::Type* InTy = Int32Ty;
7414 llvm::Type* FTy = HalfTy;
7415 llvm::Type *Tys[2] = {InTy, FTy};
7416 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7417 switch (BuiltinID) {
7418 default: llvm_unreachable("missing builtin ID in switch!");
7419 case NEON::BI__builtin_neon_vcvtah_u16_f16:
7420 Int = Intrinsic::aarch64_neon_fcvtau; break;
7421 case NEON::BI__builtin_neon_vcvtmh_u16_f16:
7422 Int = Intrinsic::aarch64_neon_fcvtmu; break;
7423 case NEON::BI__builtin_neon_vcvtnh_u16_f16:
7424 Int = Intrinsic::aarch64_neon_fcvtnu; break;
7425 case NEON::BI__builtin_neon_vcvtph_u16_f16:
7426 Int = Intrinsic::aarch64_neon_fcvtpu; break;
7427 case NEON::BI__builtin_neon_vcvtah_s16_f16:
7428 Int = Intrinsic::aarch64_neon_fcvtas; break;
7429 case NEON::BI__builtin_neon_vcvtmh_s16_f16:
7430 Int = Intrinsic::aarch64_neon_fcvtms; break;
7431 case NEON::BI__builtin_neon_vcvtnh_s16_f16:
7432 Int = Intrinsic::aarch64_neon_fcvtns; break;
7433 case NEON::BI__builtin_neon_vcvtph_s16_f16:
7434 Int = Intrinsic::aarch64_neon_fcvtps; break;
7435 }
7436 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "fcvt");
7437 return Builder.CreateTrunc(Ops[0], Int16Ty);
7438 }
7439 case NEON::BI__builtin_neon_vcaleh_f16:
7440 case NEON::BI__builtin_neon_vcalth_f16:
7441 case NEON::BI__builtin_neon_vcageh_f16:
7442 case NEON::BI__builtin_neon_vcagth_f16: {
7443 unsigned Int;
7444 llvm::Type* InTy = Int32Ty;
7445 llvm::Type* FTy = HalfTy;
7446 llvm::Type *Tys[2] = {InTy, FTy};
7447 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7448 switch (BuiltinID) {
7449 default: llvm_unreachable("missing builtin ID in switch!");
7450 case NEON::BI__builtin_neon_vcageh_f16:
7451 Int = Intrinsic::aarch64_neon_facge; break;
7452 case NEON::BI__builtin_neon_vcagth_f16:
7453 Int = Intrinsic::aarch64_neon_facgt; break;
7454 case NEON::BI__builtin_neon_vcaleh_f16:
7455 Int = Intrinsic::aarch64_neon_facge; std::swap(Ops[0], Ops[1]); break;
7456 case NEON::BI__builtin_neon_vcalth_f16:
7457 Int = Intrinsic::aarch64_neon_facgt; std::swap(Ops[0], Ops[1]); break;
7458 }
7459 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "facg");
7460 return Builder.CreateTrunc(Ops[0], Int16Ty);
7461 }
7462 case NEON::BI__builtin_neon_vcvth_n_s16_f16:
7463 case NEON::BI__builtin_neon_vcvth_n_u16_f16: {
7464 unsigned Int;
7465 llvm::Type* InTy = Int32Ty;
7466 llvm::Type* FTy = HalfTy;
7467 llvm::Type *Tys[2] = {InTy, FTy};
7468 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7469 switch (BuiltinID) {
7470 default: llvm_unreachable("missing builtin ID in switch!");
7471 case NEON::BI__builtin_neon_vcvth_n_s16_f16:
7472 Int = Intrinsic::aarch64_neon_vcvtfp2fxs; break;
7473 case NEON::BI__builtin_neon_vcvth_n_u16_f16:
7474 Int = Intrinsic::aarch64_neon_vcvtfp2fxu; break;
7475 }
7476 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "fcvth_n");
7477 return Builder.CreateTrunc(Ops[0], Int16Ty);
7478 }
7479 case NEON::BI__builtin_neon_vcvth_n_f16_s16:
7480 case NEON::BI__builtin_neon_vcvth_n_f16_u16: {
7481 unsigned Int;
7482 llvm::Type* FTy = HalfTy;
7483 llvm::Type* InTy = Int32Ty;
7484 llvm::Type *Tys[2] = {FTy, InTy};
7485 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7486 switch (BuiltinID) {
7487 default: llvm_unreachable("missing builtin ID in switch!");
7488 case NEON::BI__builtin_neon_vcvth_n_f16_s16:
7489 Int = Intrinsic::aarch64_neon_vcvtfxs2fp;
7490 Ops[0] = Builder.CreateSExt(Ops[0], InTy, "sext");
7491 break;
7492 case NEON::BI__builtin_neon_vcvth_n_f16_u16:
7493 Int = Intrinsic::aarch64_neon_vcvtfxu2fp;
7494 Ops[0] = Builder.CreateZExt(Ops[0], InTy);
7495 break;
7496 }
7497 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "fcvth_n");
7498 }
Tim Northovera2ee4332014-03-29 15:09:45 +00007499 case NEON::BI__builtin_neon_vpaddd_s64: {
Benjamin Kramerc385a802015-07-28 15:40:11 +00007500 llvm::Type *Ty = llvm::VectorType::get(Int64Ty, 2);
Tim Northovera2ee4332014-03-29 15:09:45 +00007501 Value *Vec = EmitScalarExpr(E->getArg(0));
7502    // The vector is v2i64, so make sure it's bitcast to that.
7503 Vec = Builder.CreateBitCast(Vec, Ty, "v2i64");
Michael J. Spencerdd597752014-05-31 00:22:12 +00007504 llvm::Value *Idx0 = llvm::ConstantInt::get(SizeTy, 0);
7505 llvm::Value *Idx1 = llvm::ConstantInt::get(SizeTy, 1);
Tim Northovera2ee4332014-03-29 15:09:45 +00007506 Value *Op0 = Builder.CreateExtractElement(Vec, Idx0, "lane0");
7507 Value *Op1 = Builder.CreateExtractElement(Vec, Idx1, "lane1");
7508    // Pairwise addition of a v2i64 into a scalar i64.
7509 return Builder.CreateAdd(Op0, Op1, "vpaddd");
7510 }
7511 case NEON::BI__builtin_neon_vpaddd_f64: {
7512 llvm::Type *Ty =
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007513 llvm::VectorType::get(DoubleTy, 2);
Tim Northovera2ee4332014-03-29 15:09:45 +00007514 Value *Vec = EmitScalarExpr(E->getArg(0));
7515 // The vector is v2f64, so make sure it's bitcast to that.
7516 Vec = Builder.CreateBitCast(Vec, Ty, "v2f64");
Michael J. Spencerdd597752014-05-31 00:22:12 +00007517 llvm::Value *Idx0 = llvm::ConstantInt::get(SizeTy, 0);
7518 llvm::Value *Idx1 = llvm::ConstantInt::get(SizeTy, 1);
Tim Northovera2ee4332014-03-29 15:09:45 +00007519 Value *Op0 = Builder.CreateExtractElement(Vec, Idx0, "lane0");
7520 Value *Op1 = Builder.CreateExtractElement(Vec, Idx1, "lane1");
7521 // Pairwise addition of a v2f64 into a scalar f64.
7522 return Builder.CreateFAdd(Op0, Op1, "vpaddd");
7523 }
7524 case NEON::BI__builtin_neon_vpadds_f32: {
7525 llvm::Type *Ty =
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007526 llvm::VectorType::get(FloatTy, 2);
Tim Northovera2ee4332014-03-29 15:09:45 +00007527 Value *Vec = EmitScalarExpr(E->getArg(0));
7528 // The vector is v2f32, so make sure it's bitcast to that.
7529 Vec = Builder.CreateBitCast(Vec, Ty, "v2f32");
Michael J. Spencerdd597752014-05-31 00:22:12 +00007530 llvm::Value *Idx0 = llvm::ConstantInt::get(SizeTy, 0);
7531 llvm::Value *Idx1 = llvm::ConstantInt::get(SizeTy, 1);
Tim Northovera2ee4332014-03-29 15:09:45 +00007532 Value *Op0 = Builder.CreateExtractElement(Vec, Idx0, "lane0");
7533 Value *Op1 = Builder.CreateExtractElement(Vec, Idx1, "lane1");
7534 // Pairwise addition of a v2f32 into a scalar f32.
7535 return Builder.CreateFAdd(Op0, Op1, "vpaddd");
7536 }
7537 case NEON::BI__builtin_neon_vceqzd_s64:
7538 case NEON::BI__builtin_neon_vceqzd_f64:
7539 case NEON::BI__builtin_neon_vceqzs_f32:
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007540 case NEON::BI__builtin_neon_vceqzh_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00007541 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7542 return EmitAArch64CompareBuiltinExpr(
David Majnemerced8bdf2015-02-25 17:36:15 +00007543 Ops[0], ConvertType(E->getCallReturnType(getContext())),
7544 ICmpInst::FCMP_OEQ, ICmpInst::ICMP_EQ, "vceqz");
Tim Northovera2ee4332014-03-29 15:09:45 +00007545 case NEON::BI__builtin_neon_vcgezd_s64:
7546 case NEON::BI__builtin_neon_vcgezd_f64:
7547 case NEON::BI__builtin_neon_vcgezs_f32:
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007548 case NEON::BI__builtin_neon_vcgezh_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00007549 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7550 return EmitAArch64CompareBuiltinExpr(
David Majnemerced8bdf2015-02-25 17:36:15 +00007551 Ops[0], ConvertType(E->getCallReturnType(getContext())),
7552 ICmpInst::FCMP_OGE, ICmpInst::ICMP_SGE, "vcgez");
Tim Northovera2ee4332014-03-29 15:09:45 +00007553 case NEON::BI__builtin_neon_vclezd_s64:
7554 case NEON::BI__builtin_neon_vclezd_f64:
7555 case NEON::BI__builtin_neon_vclezs_f32:
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007556 case NEON::BI__builtin_neon_vclezh_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00007557 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7558 return EmitAArch64CompareBuiltinExpr(
David Majnemerced8bdf2015-02-25 17:36:15 +00007559 Ops[0], ConvertType(E->getCallReturnType(getContext())),
7560 ICmpInst::FCMP_OLE, ICmpInst::ICMP_SLE, "vclez");
Tim Northovera2ee4332014-03-29 15:09:45 +00007561 case NEON::BI__builtin_neon_vcgtzd_s64:
7562 case NEON::BI__builtin_neon_vcgtzd_f64:
7563 case NEON::BI__builtin_neon_vcgtzs_f32:
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007564 case NEON::BI__builtin_neon_vcgtzh_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00007565 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7566 return EmitAArch64CompareBuiltinExpr(
David Majnemerced8bdf2015-02-25 17:36:15 +00007567 Ops[0], ConvertType(E->getCallReturnType(getContext())),
7568 ICmpInst::FCMP_OGT, ICmpInst::ICMP_SGT, "vcgtz");
Tim Northovera2ee4332014-03-29 15:09:45 +00007569 case NEON::BI__builtin_neon_vcltzd_s64:
7570 case NEON::BI__builtin_neon_vcltzd_f64:
7571 case NEON::BI__builtin_neon_vcltzs_f32:
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007572 case NEON::BI__builtin_neon_vcltzh_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00007573 Ops.push_back(EmitScalarExpr(E->getArg(0)));
7574 return EmitAArch64CompareBuiltinExpr(
David Majnemerced8bdf2015-02-25 17:36:15 +00007575 Ops[0], ConvertType(E->getCallReturnType(getContext())),
7576 ICmpInst::FCMP_OLT, ICmpInst::ICMP_SLT, "vcltz");
Tim Northovera2ee4332014-03-29 15:09:45 +00007577
7578 case NEON::BI__builtin_neon_vceqzd_u64: {
Tim Northovera2ee4332014-03-29 15:09:45 +00007579 Ops.push_back(EmitScalarExpr(E->getArg(0)));
Benjamin Kramerc385a802015-07-28 15:40:11 +00007580 Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty);
7581 Ops[0] =
7582 Builder.CreateICmpEQ(Ops[0], llvm::Constant::getNullValue(Int64Ty));
7583 return Builder.CreateSExt(Ops[0], Int64Ty, "vceqzd");
Tim Northovera2ee4332014-03-29 15:09:45 +00007584 }
7585 case NEON::BI__builtin_neon_vceqd_f64:
7586 case NEON::BI__builtin_neon_vcled_f64:
7587 case NEON::BI__builtin_neon_vcltd_f64:
7588 case NEON::BI__builtin_neon_vcged_f64:
7589 case NEON::BI__builtin_neon_vcgtd_f64: {
7590 llvm::CmpInst::Predicate P;
7591 switch (BuiltinID) {
7592 default: llvm_unreachable("missing builtin ID in switch!");
7593 case NEON::BI__builtin_neon_vceqd_f64: P = llvm::FCmpInst::FCMP_OEQ; break;
7594 case NEON::BI__builtin_neon_vcled_f64: P = llvm::FCmpInst::FCMP_OLE; break;
7595 case NEON::BI__builtin_neon_vcltd_f64: P = llvm::FCmpInst::FCMP_OLT; break;
7596 case NEON::BI__builtin_neon_vcged_f64: P = llvm::FCmpInst::FCMP_OGE; break;
7597 case NEON::BI__builtin_neon_vcgtd_f64: P = llvm::FCmpInst::FCMP_OGT; break;
7598 }
7599 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7600 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
7601 Ops[1] = Builder.CreateBitCast(Ops[1], DoubleTy);
7602 Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]);
7603 return Builder.CreateSExt(Ops[0], Int64Ty, "vcmpd");
7604 }
7605 case NEON::BI__builtin_neon_vceqs_f32:
7606 case NEON::BI__builtin_neon_vcles_f32:
7607 case NEON::BI__builtin_neon_vclts_f32:
7608 case NEON::BI__builtin_neon_vcges_f32:
7609 case NEON::BI__builtin_neon_vcgts_f32: {
7610 llvm::CmpInst::Predicate P;
7611 switch (BuiltinID) {
7612 default: llvm_unreachable("missing builtin ID in switch!");
7613 case NEON::BI__builtin_neon_vceqs_f32: P = llvm::FCmpInst::FCMP_OEQ; break;
7614 case NEON::BI__builtin_neon_vcles_f32: P = llvm::FCmpInst::FCMP_OLE; break;
7615 case NEON::BI__builtin_neon_vclts_f32: P = llvm::FCmpInst::FCMP_OLT; break;
7616 case NEON::BI__builtin_neon_vcges_f32: P = llvm::FCmpInst::FCMP_OGE; break;
7617 case NEON::BI__builtin_neon_vcgts_f32: P = llvm::FCmpInst::FCMP_OGT; break;
7618 }
7619 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7620 Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy);
7621 Ops[1] = Builder.CreateBitCast(Ops[1], FloatTy);
7622 Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]);
7623 return Builder.CreateSExt(Ops[0], Int32Ty, "vcmpd");
7624 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007625 case NEON::BI__builtin_neon_vceqh_f16:
7626 case NEON::BI__builtin_neon_vcleh_f16:
7627 case NEON::BI__builtin_neon_vclth_f16:
7628 case NEON::BI__builtin_neon_vcgeh_f16:
7629 case NEON::BI__builtin_neon_vcgth_f16: {
7630 llvm::CmpInst::Predicate P;
7631 switch (BuiltinID) {
7632 default: llvm_unreachable("missing builtin ID in switch!");
7633 case NEON::BI__builtin_neon_vceqh_f16: P = llvm::FCmpInst::FCMP_OEQ; break;
7634 case NEON::BI__builtin_neon_vcleh_f16: P = llvm::FCmpInst::FCMP_OLE; break;
7635 case NEON::BI__builtin_neon_vclth_f16: P = llvm::FCmpInst::FCMP_OLT; break;
7636 case NEON::BI__builtin_neon_vcgeh_f16: P = llvm::FCmpInst::FCMP_OGE; break;
7637 case NEON::BI__builtin_neon_vcgth_f16: P = llvm::FCmpInst::FCMP_OGT; break;
7638 }
7639 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7640 Ops[0] = Builder.CreateBitCast(Ops[0], HalfTy);
7641 Ops[1] = Builder.CreateBitCast(Ops[1], HalfTy);
7642 Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]);
7643 return Builder.CreateSExt(Ops[0], Int16Ty, "vcmpd");
7644 }
Tim Northovera2ee4332014-03-29 15:09:45 +00007645 case NEON::BI__builtin_neon_vceqd_s64:
7646 case NEON::BI__builtin_neon_vceqd_u64:
7647 case NEON::BI__builtin_neon_vcgtd_s64:
7648 case NEON::BI__builtin_neon_vcgtd_u64:
7649 case NEON::BI__builtin_neon_vcltd_s64:
7650 case NEON::BI__builtin_neon_vcltd_u64:
7651 case NEON::BI__builtin_neon_vcged_u64:
7652 case NEON::BI__builtin_neon_vcged_s64:
7653 case NEON::BI__builtin_neon_vcled_u64:
7654 case NEON::BI__builtin_neon_vcled_s64: {
7655 llvm::CmpInst::Predicate P;
7656 switch (BuiltinID) {
7657 default: llvm_unreachable("missing builtin ID in switch!");
7658 case NEON::BI__builtin_neon_vceqd_s64:
7659 case NEON::BI__builtin_neon_vceqd_u64:P = llvm::ICmpInst::ICMP_EQ;break;
7660 case NEON::BI__builtin_neon_vcgtd_s64:P = llvm::ICmpInst::ICMP_SGT;break;
7661 case NEON::BI__builtin_neon_vcgtd_u64:P = llvm::ICmpInst::ICMP_UGT;break;
7662 case NEON::BI__builtin_neon_vcltd_s64:P = llvm::ICmpInst::ICMP_SLT;break;
7663 case NEON::BI__builtin_neon_vcltd_u64:P = llvm::ICmpInst::ICMP_ULT;break;
7664 case NEON::BI__builtin_neon_vcged_u64:P = llvm::ICmpInst::ICMP_UGE;break;
7665 case NEON::BI__builtin_neon_vcged_s64:P = llvm::ICmpInst::ICMP_SGE;break;
7666 case NEON::BI__builtin_neon_vcled_u64:P = llvm::ICmpInst::ICMP_ULE;break;
7667 case NEON::BI__builtin_neon_vcled_s64:P = llvm::ICmpInst::ICMP_SLE;break;
7668 }
Tim Northovera2ee4332014-03-29 15:09:45 +00007669 Ops.push_back(EmitScalarExpr(E->getArg(1)));
Tim Northover0c68faa2014-03-31 15:47:09 +00007670 Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty);
7671 Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00007672 Ops[0] = Builder.CreateICmp(P, Ops[0], Ops[1]);
Tim Northover0c68faa2014-03-31 15:47:09 +00007673 return Builder.CreateSExt(Ops[0], Int64Ty, "vceqd");
Tim Northovera2ee4332014-03-29 15:09:45 +00007674 }
7675 case NEON::BI__builtin_neon_vtstd_s64:
7676 case NEON::BI__builtin_neon_vtstd_u64: {
Tim Northovera2ee4332014-03-29 15:09:45 +00007677 Ops.push_back(EmitScalarExpr(E->getArg(1)));
Benjamin Kramerc385a802015-07-28 15:40:11 +00007678 Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty);
7679 Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00007680 Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
7681 Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
Benjamin Kramerc385a802015-07-28 15:40:11 +00007682 llvm::Constant::getNullValue(Int64Ty));
7683 return Builder.CreateSExt(Ops[0], Int64Ty, "vtstd");
Tim Northovera2ee4332014-03-29 15:09:45 +00007684 }
7685 case NEON::BI__builtin_neon_vset_lane_i8:
7686 case NEON::BI__builtin_neon_vset_lane_i16:
7687 case NEON::BI__builtin_neon_vset_lane_i32:
7688 case NEON::BI__builtin_neon_vset_lane_i64:
7689 case NEON::BI__builtin_neon_vset_lane_f32:
7690 case NEON::BI__builtin_neon_vsetq_lane_i8:
7691 case NEON::BI__builtin_neon_vsetq_lane_i16:
7692 case NEON::BI__builtin_neon_vsetq_lane_i32:
7693 case NEON::BI__builtin_neon_vsetq_lane_i64:
7694 case NEON::BI__builtin_neon_vsetq_lane_f32:
7695 Ops.push_back(EmitScalarExpr(E->getArg(2)));
7696 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
7697 case NEON::BI__builtin_neon_vset_lane_f64:
7698 // The vector type needs a cast for the v1f64 variant.
7699 Ops[1] = Builder.CreateBitCast(Ops[1],
7700 llvm::VectorType::get(DoubleTy, 1));
7701 Ops.push_back(EmitScalarExpr(E->getArg(2)));
7702 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
7703 case NEON::BI__builtin_neon_vsetq_lane_f64:
7704 // The vector type needs a cast for the v2f64 variant.
7705 Ops[1] = Builder.CreateBitCast(Ops[1],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007706 llvm::VectorType::get(DoubleTy, 2));
Tim Northovera2ee4332014-03-29 15:09:45 +00007707 Ops.push_back(EmitScalarExpr(E->getArg(2)));
7708 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
7709
7710 case NEON::BI__builtin_neon_vget_lane_i8:
7711 case NEON::BI__builtin_neon_vdupb_lane_i8:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007712 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int8Ty, 8));
Tim Northovera2ee4332014-03-29 15:09:45 +00007713 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7714 "vget_lane");
7715 case NEON::BI__builtin_neon_vgetq_lane_i8:
7716 case NEON::BI__builtin_neon_vdupb_laneq_i8:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007717 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int8Ty, 16));
Tim Northovera2ee4332014-03-29 15:09:45 +00007718 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7719 "vgetq_lane");
7720 case NEON::BI__builtin_neon_vget_lane_i16:
7721 case NEON::BI__builtin_neon_vduph_lane_i16:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007722 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int16Ty, 4));
Tim Northovera2ee4332014-03-29 15:09:45 +00007723 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7724 "vget_lane");
7725 case NEON::BI__builtin_neon_vgetq_lane_i16:
7726 case NEON::BI__builtin_neon_vduph_laneq_i16:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007727 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int16Ty, 8));
Tim Northovera2ee4332014-03-29 15:09:45 +00007728 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7729 "vgetq_lane");
7730 case NEON::BI__builtin_neon_vget_lane_i32:
7731 case NEON::BI__builtin_neon_vdups_lane_i32:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007732 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int32Ty, 2));
Tim Northovera2ee4332014-03-29 15:09:45 +00007733 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7734 "vget_lane");
7735 case NEON::BI__builtin_neon_vdups_lane_f32:
7736 Ops[0] = Builder.CreateBitCast(Ops[0],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007737 llvm::VectorType::get(FloatTy, 2));
Tim Northovera2ee4332014-03-29 15:09:45 +00007738 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7739 "vdups_lane");
7740 case NEON::BI__builtin_neon_vgetq_lane_i32:
7741 case NEON::BI__builtin_neon_vdups_laneq_i32:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007742 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int32Ty, 4));
Tim Northovera2ee4332014-03-29 15:09:45 +00007743 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7744 "vgetq_lane");
7745 case NEON::BI__builtin_neon_vget_lane_i64:
7746 case NEON::BI__builtin_neon_vdupd_lane_i64:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007747 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 1));
Tim Northovera2ee4332014-03-29 15:09:45 +00007748 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7749 "vget_lane");
7750 case NEON::BI__builtin_neon_vdupd_lane_f64:
7751 Ops[0] = Builder.CreateBitCast(Ops[0],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007752 llvm::VectorType::get(DoubleTy, 1));
Tim Northovera2ee4332014-03-29 15:09:45 +00007753 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7754 "vdupd_lane");
7755 case NEON::BI__builtin_neon_vgetq_lane_i64:
7756 case NEON::BI__builtin_neon_vdupd_laneq_i64:
Benjamin Kramerc385a802015-07-28 15:40:11 +00007757 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 2));
Tim Northovera2ee4332014-03-29 15:09:45 +00007758 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7759 "vgetq_lane");
7760 case NEON::BI__builtin_neon_vget_lane_f32:
7761 Ops[0] = Builder.CreateBitCast(Ops[0],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007762 llvm::VectorType::get(FloatTy, 2));
Tim Northovera2ee4332014-03-29 15:09:45 +00007763 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7764 "vget_lane");
7765 case NEON::BI__builtin_neon_vget_lane_f64:
7766 Ops[0] = Builder.CreateBitCast(Ops[0],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007767 llvm::VectorType::get(DoubleTy, 1));
Tim Northovera2ee4332014-03-29 15:09:45 +00007768 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7769 "vget_lane");
7770 case NEON::BI__builtin_neon_vgetq_lane_f32:
7771 case NEON::BI__builtin_neon_vdups_laneq_f32:
7772 Ops[0] = Builder.CreateBitCast(Ops[0],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007773 llvm::VectorType::get(FloatTy, 4));
Tim Northovera2ee4332014-03-29 15:09:45 +00007774 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7775 "vgetq_lane");
7776 case NEON::BI__builtin_neon_vgetq_lane_f64:
7777 case NEON::BI__builtin_neon_vdupd_laneq_f64:
7778 Ops[0] = Builder.CreateBitCast(Ops[0],
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00007779 llvm::VectorType::get(DoubleTy, 2));
Tim Northovera2ee4332014-03-29 15:09:45 +00007780 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7781 "vgetq_lane");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007782 case NEON::BI__builtin_neon_vaddh_f16:
7783 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7784 return Builder.CreateFAdd(Ops[0], Ops[1], "vaddh");
7785 case NEON::BI__builtin_neon_vsubh_f16:
7786 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7787 return Builder.CreateFSub(Ops[0], Ops[1], "vsubh");
7788 case NEON::BI__builtin_neon_vmulh_f16:
7789 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7790 return Builder.CreateFMul(Ops[0], Ops[1], "vmulh");
7791 case NEON::BI__builtin_neon_vdivh_f16:
7792 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7793 return Builder.CreateFDiv(Ops[0], Ops[1], "vdivh");
7794 case NEON::BI__builtin_neon_vfmah_f16: {
James Y Knight8799cae2019-02-03 21:53:49 +00007795 Function *F = CGM.getIntrinsic(Intrinsic::fma, HalfTy);
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007796 // NEON intrinsic puts accumulator first, unlike the LLVM fma.
7797 return Builder.CreateCall(F,
7798 {EmitScalarExpr(E->getArg(1)), EmitScalarExpr(E->getArg(2)), Ops[0]});
7799 }
7800 case NEON::BI__builtin_neon_vfmsh_f16: {
James Y Knight8799cae2019-02-03 21:53:49 +00007801 Function *F = CGM.getIntrinsic(Intrinsic::fma, HalfTy);
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00007802 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(HalfTy);
7803 Value* Sub = Builder.CreateFSub(Zero, EmitScalarExpr(E->getArg(1)), "vsubh");
7804 // NEON intrinsic puts accumulator first, unlike the LLVM fma.
7805 return Builder.CreateCall(F, {Sub, EmitScalarExpr(E->getArg(2)), Ops[0]});
7806 }
Tim Northovera2ee4332014-03-29 15:09:45 +00007807 case NEON::BI__builtin_neon_vaddd_s64:
7808 case NEON::BI__builtin_neon_vaddd_u64:
7809 return Builder.CreateAdd(Ops[0], EmitScalarExpr(E->getArg(1)), "vaddd");
7810 case NEON::BI__builtin_neon_vsubd_s64:
7811 case NEON::BI__builtin_neon_vsubd_u64:
7812 return Builder.CreateSub(Ops[0], EmitScalarExpr(E->getArg(1)), "vsubd");
7813 case NEON::BI__builtin_neon_vqdmlalh_s16:
7814 case NEON::BI__builtin_neon_vqdmlslh_s16: {
7815 SmallVector<Value *, 2> ProductOps;
7816 ProductOps.push_back(vectorWrapScalar16(Ops[1]));
7817 ProductOps.push_back(vectorWrapScalar16(EmitScalarExpr(E->getArg(2))));
7818 llvm::Type *VTy = llvm::VectorType::get(Int32Ty, 4);
Tim Northover573cbee2014-05-24 12:52:07 +00007819 Ops[1] = EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmull, VTy),
Tim Northovera2ee4332014-03-29 15:09:45 +00007820 ProductOps, "vqdmlXl");
Michael J. Spencerdd597752014-05-31 00:22:12 +00007821 Constant *CI = ConstantInt::get(SizeTy, 0);
Tim Northovera2ee4332014-03-29 15:09:45 +00007822 Ops[1] = Builder.CreateExtractElement(Ops[1], CI, "lane0");
7823
7824 unsigned AccumInt = BuiltinID == NEON::BI__builtin_neon_vqdmlalh_s16
Tim Northover573cbee2014-05-24 12:52:07 +00007825 ? Intrinsic::aarch64_neon_sqadd
7826 : Intrinsic::aarch64_neon_sqsub;
Tim Northovera2ee4332014-03-29 15:09:45 +00007827 return EmitNeonCall(CGM.getIntrinsic(AccumInt, Int32Ty), Ops, "vqdmlXl");
7828 }
7829 case NEON::BI__builtin_neon_vqshlud_n_s64: {
7830 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7831 Ops[1] = Builder.CreateZExt(Ops[1], Int64Ty);
Tim Northover573cbee2014-05-24 12:52:07 +00007832 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqshlu, Int64Ty),
Hao Liua19a2e22014-04-28 07:36:12 +00007833 Ops, "vqshlu_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007834 }
7835 case NEON::BI__builtin_neon_vqshld_n_u64:
7836 case NEON::BI__builtin_neon_vqshld_n_s64: {
7837 unsigned Int = BuiltinID == NEON::BI__builtin_neon_vqshld_n_u64
Tim Northover573cbee2014-05-24 12:52:07 +00007838 ? Intrinsic::aarch64_neon_uqshl
7839 : Intrinsic::aarch64_neon_sqshl;
Tim Northovera2ee4332014-03-29 15:09:45 +00007840 Ops.push_back(EmitScalarExpr(E->getArg(1)));
7841 Ops[1] = Builder.CreateZExt(Ops[1], Int64Ty);
Hao Liua19a2e22014-04-28 07:36:12 +00007842 return EmitNeonCall(CGM.getIntrinsic(Int, Int64Ty), Ops, "vqshl_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007843 }
7844 case NEON::BI__builtin_neon_vrshrd_n_u64:
7845 case NEON::BI__builtin_neon_vrshrd_n_s64: {
7846 unsigned Int = BuiltinID == NEON::BI__builtin_neon_vrshrd_n_u64
Tim Northover573cbee2014-05-24 12:52:07 +00007847 ? Intrinsic::aarch64_neon_urshl
7848 : Intrinsic::aarch64_neon_srshl;
Tim Northovera2ee4332014-03-29 15:09:45 +00007849 Ops.push_back(EmitScalarExpr(E->getArg(1)));
Hao Liua19a2e22014-04-28 07:36:12 +00007850 int SV = cast<ConstantInt>(Ops[1])->getSExtValue();
7851 Ops[1] = ConstantInt::get(Int64Ty, -SV);
7852 return EmitNeonCall(CGM.getIntrinsic(Int, Int64Ty), Ops, "vrshr_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007853 }
7854 case NEON::BI__builtin_neon_vrsrad_n_u64:
7855 case NEON::BI__builtin_neon_vrsrad_n_s64: {
7856 unsigned Int = BuiltinID == NEON::BI__builtin_neon_vrsrad_n_u64
Tim Northover573cbee2014-05-24 12:52:07 +00007857 ? Intrinsic::aarch64_neon_urshl
7858 : Intrinsic::aarch64_neon_srshl;
Tim Northover0c68faa2014-03-31 15:47:09 +00007859 Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty);
7860 Ops.push_back(Builder.CreateNeg(EmitScalarExpr(E->getArg(2))));
David Blaikie43f9bb72015-05-18 22:14:03 +00007861 Ops[1] = Builder.CreateCall(CGM.getIntrinsic(Int, Int64Ty),
7862 {Ops[1], Builder.CreateSExt(Ops[2], Int64Ty)});
Tim Northover0c68faa2014-03-31 15:47:09 +00007863 return Builder.CreateAdd(Ops[0], Builder.CreateBitCast(Ops[1], Int64Ty));
Tim Northovera2ee4332014-03-29 15:09:45 +00007864 }
7865 case NEON::BI__builtin_neon_vshld_n_s64:
7866 case NEON::BI__builtin_neon_vshld_n_u64: {
7867 llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(1)));
7868 return Builder.CreateShl(
Hao Liu9f9492b2014-05-14 08:59:30 +00007869 Ops[0], ConstantInt::get(Int64Ty, Amt->getZExtValue()), "shld_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007870 }
7871 case NEON::BI__builtin_neon_vshrd_n_s64: {
7872 llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(1)));
7873 return Builder.CreateAShr(
7874 Ops[0], ConstantInt::get(Int64Ty, std::min(static_cast<uint64_t>(63),
7875 Amt->getZExtValue())),
Hao Liu9f9492b2014-05-14 08:59:30 +00007876 "shrd_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007877 }
7878 case NEON::BI__builtin_neon_vshrd_n_u64: {
7879 llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(1)));
Hao Liu9f9492b2014-05-14 08:59:30 +00007880 uint64_t ShiftAmt = Amt->getZExtValue();
7881 // Right-shifting an unsigned value by its size yields 0.
7882 if (ShiftAmt == 64)
7883 return ConstantInt::get(Int64Ty, 0);
7884 return Builder.CreateLShr(Ops[0], ConstantInt::get(Int64Ty, ShiftAmt),
7885 "shrd_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007886 }
7887 case NEON::BI__builtin_neon_vsrad_n_s64: {
7888 llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(2)));
7889 Ops[1] = Builder.CreateAShr(
7890 Ops[1], ConstantInt::get(Int64Ty, std::min(static_cast<uint64_t>(63),
7891 Amt->getZExtValue())),
Hao Liu9f9492b2014-05-14 08:59:30 +00007892 "shrd_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007893 return Builder.CreateAdd(Ops[0], Ops[1]);
7894 }
7895 case NEON::BI__builtin_neon_vsrad_n_u64: {
7896 llvm::ConstantInt *Amt = cast<ConstantInt>(EmitScalarExpr(E->getArg(2)));
Hao Liu9f9492b2014-05-14 08:59:30 +00007897 uint64_t ShiftAmt = Amt->getZExtValue();
7898 // Right-shifting an unsigned value by its size yields 0.
7899 // As Op + 0 = Op, return Ops[0] directly.
7900 if (ShiftAmt == 64)
7901 return Ops[0];
7902 Ops[1] = Builder.CreateLShr(Ops[1], ConstantInt::get(Int64Ty, ShiftAmt),
7903 "shrd_n");
Tim Northovera2ee4332014-03-29 15:09:45 +00007904 return Builder.CreateAdd(Ops[0], Ops[1]);
7905 }
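    // Rationale sketch (editorial, not part of the original source): LLVM IR
    // shifts by an amount >= the bit width yield poison, while the AArch64
    // builtins define a shift by 64, so the cases above clamp signed right
    // shifts to 63 (same sign-fill result) and fold unsigned right shifts by
    // 64 to 0 (or to the unmodified accumulator for vsrad_n_u64).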
7906 case NEON::BI__builtin_neon_vqdmlalh_lane_s16:
7907 case NEON::BI__builtin_neon_vqdmlalh_laneq_s16:
7908 case NEON::BI__builtin_neon_vqdmlslh_lane_s16:
7909 case NEON::BI__builtin_neon_vqdmlslh_laneq_s16: {
7910 Ops[2] = Builder.CreateExtractElement(Ops[2], EmitScalarExpr(E->getArg(3)),
7911 "lane");
7912 SmallVector<Value *, 2> ProductOps;
7913 ProductOps.push_back(vectorWrapScalar16(Ops[1]));
7914 ProductOps.push_back(vectorWrapScalar16(Ops[2]));
7915 llvm::Type *VTy = llvm::VectorType::get(Int32Ty, 4);
Tim Northover573cbee2014-05-24 12:52:07 +00007916 Ops[1] = EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmull, VTy),
Tim Northovera2ee4332014-03-29 15:09:45 +00007917 ProductOps, "vqdmlXl");
Michael J. Spencerdd597752014-05-31 00:22:12 +00007918 Constant *CI = ConstantInt::get(SizeTy, 0);
Tim Northovera2ee4332014-03-29 15:09:45 +00007919 Ops[1] = Builder.CreateExtractElement(Ops[1], CI, "lane0");
7920 Ops.pop_back();
7921
7922 unsigned AccInt = (BuiltinID == NEON::BI__builtin_neon_vqdmlalh_lane_s16 ||
7923 BuiltinID == NEON::BI__builtin_neon_vqdmlalh_laneq_s16)
Tim Northover573cbee2014-05-24 12:52:07 +00007924 ? Intrinsic::aarch64_neon_sqadd
7925 : Intrinsic::aarch64_neon_sqsub;
Tim Northovera2ee4332014-03-29 15:09:45 +00007926 return EmitNeonCall(CGM.getIntrinsic(AccInt, Int32Ty), Ops, "vqdmlXl");
7927 }
7928 case NEON::BI__builtin_neon_vqdmlals_s32:
7929 case NEON::BI__builtin_neon_vqdmlsls_s32: {
7930 SmallVector<Value *, 2> ProductOps;
7931 ProductOps.push_back(Ops[1]);
7932 ProductOps.push_back(EmitScalarExpr(E->getArg(2)));
7933 Ops[1] =
Tim Northover573cbee2014-05-24 12:52:07 +00007934 EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmulls_scalar),
Tim Northovera2ee4332014-03-29 15:09:45 +00007935 ProductOps, "vqdmlXl");
7936
7937 unsigned AccumInt = BuiltinID == NEON::BI__builtin_neon_vqdmlals_s32
Tim Northover573cbee2014-05-24 12:52:07 +00007938 ? Intrinsic::aarch64_neon_sqadd
7939 : Intrinsic::aarch64_neon_sqsub;
Tim Northovera2ee4332014-03-29 15:09:45 +00007940 return EmitNeonCall(CGM.getIntrinsic(AccumInt, Int64Ty), Ops, "vqdmlXl");
7941 }
7942 case NEON::BI__builtin_neon_vqdmlals_lane_s32:
7943 case NEON::BI__builtin_neon_vqdmlals_laneq_s32:
7944 case NEON::BI__builtin_neon_vqdmlsls_lane_s32:
7945 case NEON::BI__builtin_neon_vqdmlsls_laneq_s32: {
7946 Ops[2] = Builder.CreateExtractElement(Ops[2], EmitScalarExpr(E->getArg(3)),
7947 "lane");
7948 SmallVector<Value *, 2> ProductOps;
7949 ProductOps.push_back(Ops[1]);
7950 ProductOps.push_back(Ops[2]);
7951 Ops[1] =
Tim Northover573cbee2014-05-24 12:52:07 +00007952 EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmulls_scalar),
Tim Northovera2ee4332014-03-29 15:09:45 +00007953 ProductOps, "vqdmlXl");
7954 Ops.pop_back();
7955
7956 unsigned AccInt = (BuiltinID == NEON::BI__builtin_neon_vqdmlals_lane_s32 ||
7957 BuiltinID == NEON::BI__builtin_neon_vqdmlals_laneq_s32)
Tim Northover573cbee2014-05-24 12:52:07 +00007958 ? Intrinsic::aarch64_neon_sqadd
7959 : Intrinsic::aarch64_neon_sqsub;
Tim Northovera2ee4332014-03-29 15:09:45 +00007960 return EmitNeonCall(CGM.getIntrinsic(AccInt, Int64Ty), Ops, "vqdmlXl");
7961 }
Diogo N. Sampaioeb312dd2019-04-12 10:43:48 +00007962 case NEON::BI__builtin_neon_vduph_lane_f16: {
7963 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7964 "vget_lane");
7965 }
7966 case NEON::BI__builtin_neon_vduph_laneq_f16: {
7967 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
7968 "vgetq_lane");
7969 }
Tim Northovera2ee4332014-03-29 15:09:45 +00007970 }
7971
Sjoerd Meijer87793e72018-03-19 13:22:49 +00007972 llvm::VectorType *VTy = GetNeonType(this, Type);
Tim Northovera2ee4332014-03-29 15:09:45 +00007973 llvm::Type *Ty = VTy;
7974 if (!Ty)
Craig Topper8a13c412014-05-21 05:09:00 +00007975 return nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00007976
Tim Northover573cbee2014-05-24 12:52:07 +00007977 // Not all intrinsics handled by the common case work for AArch64 yet, so only
Tim Northovera2ee4332014-03-29 15:09:45 +00007978 // defer to common code if it's been added to our special map.
Tim Northover573cbee2014-05-24 12:52:07 +00007979 Builtin = findNeonIntrinsicInMap(AArch64SIMDIntrinsicMap, BuiltinID,
7980 AArch64SIMDIntrinsicsProvenSorted);
Tim Northovera2ee4332014-03-29 15:09:45 +00007981
7982 if (Builtin)
7983 return EmitCommonNeonBuiltinExpr(
7984 Builtin->BuiltinID, Builtin->LLVMIntrinsic, Builtin->AltLLVMIntrinsic,
John McCall7f416cc2015-09-08 08:05:57 +00007985 Builtin->NameHint, Builtin->TypeModifier, E, Ops,
Sjoerd Meijer95da8752018-03-13 19:38:56 +00007986 /*never use addresses*/ Address::invalid(), Address::invalid(), Arch);
Tim Northovera2ee4332014-03-29 15:09:45 +00007987
Sjoerd Meijer95da8752018-03-13 19:38:56 +00007988 if (Value *V = EmitAArch64TblBuiltinExpr(*this, BuiltinID, E, Ops, Arch))
Tim Northovera2ee4332014-03-29 15:09:45 +00007989 return V;
7990
7991 unsigned Int;
7992 switch (BuiltinID) {
Craig Topper8a13c412014-05-21 05:09:00 +00007993 default: return nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00007994 case NEON::BI__builtin_neon_vbsl_v:
7995 case NEON::BI__builtin_neon_vbslq_v: {
7996 llvm::Type *BitTy = llvm::VectorType::getInteger(VTy);
7997 Ops[0] = Builder.CreateBitCast(Ops[0], BitTy, "vbsl");
7998 Ops[1] = Builder.CreateBitCast(Ops[1], BitTy, "vbsl");
7999 Ops[2] = Builder.CreateBitCast(Ops[2], BitTy, "vbsl");
8000
8001 Ops[1] = Builder.CreateAnd(Ops[0], Ops[1], "vbsl");
8002 Ops[2] = Builder.CreateAnd(Builder.CreateNot(Ops[0]), Ops[2], "vbsl");
8003 Ops[0] = Builder.CreateOr(Ops[1], Ops[2], "vbsl");
8004 return Builder.CreateBitCast(Ops[0], Ty);
8005 }
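  // Semantics sketch (editorial, not part of the original source): vbsl
  // selects each bit from the second operand where the mask bit is 1 and from
  // the third operand where it is 0, i.e. (mask & b) | (~mask & c) on the
  // integer-cast vectors, matching the expansion above.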
8006 case NEON::BI__builtin_neon_vfma_lane_v:
8007 case NEON::BI__builtin_neon_vfmaq_lane_v: { // Only used for FP types
8008 // The ARM builtins (and instructions) have the addend as the first
8009 // operand, but the 'fma' intrinsics have it last. Swap it around here.
8010 Value *Addend = Ops[0];
8011 Value *Multiplicand = Ops[1];
8012 Value *LaneSource = Ops[2];
8013 Ops[0] = Multiplicand;
8014 Ops[1] = LaneSource;
8015 Ops[2] = Addend;
8016
8017 // Now adjust things to handle the lane access.
8018 llvm::Type *SourceTy = BuiltinID == NEON::BI__builtin_neon_vfmaq_lane_v ?
8019 llvm::VectorType::get(VTy->getElementType(), VTy->getNumElements() / 2) :
8020 VTy;
8021 llvm::Constant *cst = cast<Constant>(Ops[3]);
8022 Value *SV = llvm::ConstantVector::getSplat(VTy->getNumElements(), cst);
8023 Ops[1] = Builder.CreateBitCast(Ops[1], SourceTy);
8024 Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV, "lane");
8025
8026 Ops.pop_back();
8027 Int = Intrinsic::fma;
8028 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "fmla");
8029 }
8030 case NEON::BI__builtin_neon_vfma_laneq_v: {
8031 llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);
8032 // v1f64 fma should be mapped to Neon scalar f64 fma
8033 if (VTy && VTy->getElementType() == DoubleTy) {
8034 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
8035 Ops[1] = Builder.CreateBitCast(Ops[1], DoubleTy);
8036 llvm::Type *VTy = GetNeonType(this,
Sjoerd Meijer87793e72018-03-19 13:22:49 +00008037 NeonTypeFlags(NeonTypeFlags::Float64, false, true));
Tim Northovera2ee4332014-03-29 15:09:45 +00008038 Ops[2] = Builder.CreateBitCast(Ops[2], VTy);
8039 Ops[2] = Builder.CreateExtractElement(Ops[2], Ops[3], "extract");
James Y Knight8799cae2019-02-03 21:53:49 +00008040 Function *F = CGM.getIntrinsic(Intrinsic::fma, DoubleTy);
David Blaikie43f9bb72015-05-18 22:14:03 +00008041 Value *Result = Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0]});
Tim Northovera2ee4332014-03-29 15:09:45 +00008042 return Builder.CreateBitCast(Result, Ty);
8043 }
James Y Knight8799cae2019-02-03 21:53:49 +00008044 Function *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008045 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
8046 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8047
8048 llvm::Type *STy = llvm::VectorType::get(VTy->getElementType(),
8049 VTy->getNumElements() * 2);
8050 Ops[2] = Builder.CreateBitCast(Ops[2], STy);
8051 Value* SV = llvm::ConstantVector::getSplat(VTy->getNumElements(),
8052 cast<ConstantInt>(Ops[3]));
8053 Ops[2] = Builder.CreateShuffleVector(Ops[2], Ops[2], SV, "lane");
8054
David Blaikie43f9bb72015-05-18 22:14:03 +00008055 return Builder.CreateCall(F, {Ops[2], Ops[1], Ops[0]});
Tim Northovera2ee4332014-03-29 15:09:45 +00008056 }
8057 case NEON::BI__builtin_neon_vfmaq_laneq_v: {
James Y Knight8799cae2019-02-03 21:53:49 +00008058 Function *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008059 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
8060 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8061
8062 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
8063 Ops[2] = EmitNeonSplat(Ops[2], cast<ConstantInt>(Ops[3]));
David Blaikie43f9bb72015-05-18 22:14:03 +00008064 return Builder.CreateCall(F, {Ops[2], Ops[1], Ops[0]});
Tim Northovera2ee4332014-03-29 15:09:45 +00008065 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008066 case NEON::BI__builtin_neon_vfmah_lane_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00008067 case NEON::BI__builtin_neon_vfmas_lane_f32:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008068 case NEON::BI__builtin_neon_vfmah_laneq_f16:
Tim Northovera2ee4332014-03-29 15:09:45 +00008069 case NEON::BI__builtin_neon_vfmas_laneq_f32:
8070 case NEON::BI__builtin_neon_vfmad_lane_f64:
8071 case NEON::BI__builtin_neon_vfmad_laneq_f64: {
8072 Ops.push_back(EmitScalarExpr(E->getArg(3)));
David Majnemerced8bdf2015-02-25 17:36:15 +00008073 llvm::Type *Ty = ConvertType(E->getCallReturnType(getContext()));
James Y Knight8799cae2019-02-03 21:53:49 +00008074 Function *F = CGM.getIntrinsic(Intrinsic::fma, Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008075 Ops[2] = Builder.CreateExtractElement(Ops[2], Ops[3], "extract");
David Blaikie43f9bb72015-05-18 22:14:03 +00008076 return Builder.CreateCall(F, {Ops[1], Ops[2], Ops[0]});
Tim Northovera2ee4332014-03-29 15:09:45 +00008077 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008078 case NEON::BI__builtin_neon_vmull_v:
8079 // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
Tim Northover573cbee2014-05-24 12:52:07 +00008080 Int = usgn ? Intrinsic::aarch64_neon_umull : Intrinsic::aarch64_neon_smull;
8081 if (Type.isPoly()) Int = Intrinsic::aarch64_neon_pmull;
Tim Northovera2ee4332014-03-29 15:09:45 +00008082 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
8083 case NEON::BI__builtin_neon_vmax_v:
8084 case NEON::BI__builtin_neon_vmaxq_v:
8085 // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
Tim Northover573cbee2014-05-24 12:52:07 +00008086 Int = usgn ? Intrinsic::aarch64_neon_umax : Intrinsic::aarch64_neon_smax;
8087 if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fmax;
Tim Northovera2ee4332014-03-29 15:09:45 +00008088 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmax");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008089 case NEON::BI__builtin_neon_vmaxh_f16: {
8090 Ops.push_back(EmitScalarExpr(E->getArg(1)));
8091 Int = Intrinsic::aarch64_neon_fmax;
8092 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vmax");
8093 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008094 case NEON::BI__builtin_neon_vmin_v:
8095 case NEON::BI__builtin_neon_vminq_v:
8096 // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
Tim Northover573cbee2014-05-24 12:52:07 +00008097 Int = usgn ? Intrinsic::aarch64_neon_umin : Intrinsic::aarch64_neon_smin;
8098 if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fmin;
Tim Northovera2ee4332014-03-29 15:09:45 +00008099 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmin");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008100 case NEON::BI__builtin_neon_vminh_f16: {
8101 Ops.push_back(EmitScalarExpr(E->getArg(1)));
8102 Int = Intrinsic::aarch64_neon_fmin;
8103 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vmin");
8104 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008105 case NEON::BI__builtin_neon_vabd_v:
8106 case NEON::BI__builtin_neon_vabdq_v:
8107 // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
Tim Northover573cbee2014-05-24 12:52:07 +00008108 Int = usgn ? Intrinsic::aarch64_neon_uabd : Intrinsic::aarch64_neon_sabd;
8109 if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fabd;
Tim Northovera2ee4332014-03-29 15:09:45 +00008110 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vabd");
8111 case NEON::BI__builtin_neon_vpadal_v:
8112 case NEON::BI__builtin_neon_vpadalq_v: {
8113 unsigned ArgElts = VTy->getNumElements();
8114 llvm::IntegerType *EltTy = cast<IntegerType>(VTy->getElementType());
8115 unsigned BitWidth = EltTy->getBitWidth();
8116 llvm::Type *ArgTy = llvm::VectorType::get(
8117 llvm::IntegerType::get(getLLVMContext(), BitWidth/2), 2*ArgElts);
8118 llvm::Type* Tys[2] = { VTy, ArgTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008119 Int = usgn ? Intrinsic::aarch64_neon_uaddlp : Intrinsic::aarch64_neon_saddlp;
Tim Northovera2ee4332014-03-29 15:09:45 +00008120 SmallVector<llvm::Value*, 1> TmpOps;
8121 TmpOps.push_back(Ops[1]);
8122 Function *F = CGM.getIntrinsic(Int, Tys);
8123 llvm::Value *tmp = EmitNeonCall(F, TmpOps, "vpadal");
8124 llvm::Value *addend = Builder.CreateBitCast(Ops[0], tmp->getType());
8125 return Builder.CreateAdd(tmp, addend);
8126 }
8127 case NEON::BI__builtin_neon_vpmin_v:
8128 case NEON::BI__builtin_neon_vpminq_v:
8129 // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
Tim Northover573cbee2014-05-24 12:52:07 +00008130 Int = usgn ? Intrinsic::aarch64_neon_uminp : Intrinsic::aarch64_neon_sminp;
8131 if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fminp;
Tim Northovera2ee4332014-03-29 15:09:45 +00008132 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmin");
8133 case NEON::BI__builtin_neon_vpmax_v:
8134 case NEON::BI__builtin_neon_vpmaxq_v:
8135 // FIXME: improve sharing scheme to cope with 3 alternative LLVM intrinsics.
Tim Northover573cbee2014-05-24 12:52:07 +00008136 Int = usgn ? Intrinsic::aarch64_neon_umaxp : Intrinsic::aarch64_neon_smaxp;
8137 if (Ty->isFPOrFPVectorTy()) Int = Intrinsic::aarch64_neon_fmaxp;
Tim Northovera2ee4332014-03-29 15:09:45 +00008138 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmax");
8139 case NEON::BI__builtin_neon_vminnm_v:
8140 case NEON::BI__builtin_neon_vminnmq_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008141 Int = Intrinsic::aarch64_neon_fminnm;
Tim Northovera2ee4332014-03-29 15:09:45 +00008142 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vminnm");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008143 case NEON::BI__builtin_neon_vminnmh_f16:
8144 Ops.push_back(EmitScalarExpr(E->getArg(1)));
8145 Int = Intrinsic::aarch64_neon_fminnm;
8146 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vminnm");
Tim Northovera2ee4332014-03-29 15:09:45 +00008147 case NEON::BI__builtin_neon_vmaxnm_v:
8148 case NEON::BI__builtin_neon_vmaxnmq_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008149 Int = Intrinsic::aarch64_neon_fmaxnm;
Tim Northovera2ee4332014-03-29 15:09:45 +00008150 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmaxnm");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008151 case NEON::BI__builtin_neon_vmaxnmh_f16:
8152 Ops.push_back(EmitScalarExpr(E->getArg(1)));
8153 Int = Intrinsic::aarch64_neon_fmaxnm;
8154 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vmaxnm");
Tim Northovera2ee4332014-03-29 15:09:45 +00008155 case NEON::BI__builtin_neon_vrecpss_f32: {
Tim Northovera2ee4332014-03-29 15:09:45 +00008156 Ops.push_back(EmitScalarExpr(E->getArg(1)));
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00008157 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_frecps, FloatTy),
Tim Northovera2ee4332014-03-29 15:09:45 +00008158 Ops, "vrecps");
8159 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008160 case NEON::BI__builtin_neon_vrecpsd_f64:
Tim Northovera2ee4332014-03-29 15:09:45 +00008161 Ops.push_back(EmitScalarExpr(E->getArg(1)));
Ahmed Bougacha40882bb2015-08-24 23:47:29 +00008162 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_frecps, DoubleTy),
Tim Northovera2ee4332014-03-29 15:09:45 +00008163 Ops, "vrecps");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008164 case NEON::BI__builtin_neon_vrecpsh_f16:
8165 Ops.push_back(EmitScalarExpr(E->getArg(1)));
8166 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_frecps, HalfTy),
8167 Ops, "vrecps");
Tim Northovera2ee4332014-03-29 15:09:45 +00008168 case NEON::BI__builtin_neon_vqshrun_n_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008169 Int = Intrinsic::aarch64_neon_sqshrun;
Tim Northovera2ee4332014-03-29 15:09:45 +00008170 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrun_n");
8171 case NEON::BI__builtin_neon_vqrshrun_n_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008172 Int = Intrinsic::aarch64_neon_sqrshrun;
Tim Northovera2ee4332014-03-29 15:09:45 +00008173 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrun_n");
8174 case NEON::BI__builtin_neon_vqshrn_n_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008175 Int = usgn ? Intrinsic::aarch64_neon_uqshrn : Intrinsic::aarch64_neon_sqshrn;
Tim Northovera2ee4332014-03-29 15:09:45 +00008176 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n");
8177 case NEON::BI__builtin_neon_vrshrn_n_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008178 Int = Intrinsic::aarch64_neon_rshrn;
Tim Northovera2ee4332014-03-29 15:09:45 +00008179 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshrn_n");
8180 case NEON::BI__builtin_neon_vqrshrn_n_v:
Tim Northover573cbee2014-05-24 12:52:07 +00008181 Int = usgn ? Intrinsic::aarch64_neon_uqrshrn : Intrinsic::aarch64_neon_sqrshrn;
Tim Northovera2ee4332014-03-29 15:09:45 +00008182 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n");
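  // The _n narrowing shifts above differ only in rounding and saturation:
  // sqshrun/sqrshrun take a signed source and saturate to an unsigned result,
  // (u|s)qshrn and (u|s)qrshrn saturate to the source signedness, and rshrn
  // rounds without saturating. In each case the immediate shift amount is
  // passed through to the intrinsic unchanged.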
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008183 case NEON::BI__builtin_neon_vrndah_f16: {
8184 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8185 Int = Intrinsic::round;
8186 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrnda");
8187 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008188 case NEON::BI__builtin_neon_vrnda_v:
8189 case NEON::BI__builtin_neon_vrndaq_v: {
8190 Int = Intrinsic::round;
8191 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrnda");
8192 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008193 case NEON::BI__builtin_neon_vrndih_f16: {
8194 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8195 Int = Intrinsic::nearbyint;
8196 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrndi");
8197 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008198 case NEON::BI__builtin_neon_vrndmh_f16: {
8199 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8200 Int = Intrinsic::floor;
8201 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrndm");
8202 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008203 case NEON::BI__builtin_neon_vrndm_v:
8204 case NEON::BI__builtin_neon_vrndmq_v: {
8205 Int = Intrinsic::floor;
8206 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndm");
8207 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008208 case NEON::BI__builtin_neon_vrndnh_f16: {
8209 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8210 Int = Intrinsic::aarch64_neon_frintn;
8211 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrndn");
8212 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008213 case NEON::BI__builtin_neon_vrndn_v:
8214 case NEON::BI__builtin_neon_vrndnq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008215 Int = Intrinsic::aarch64_neon_frintn;
Tim Northovera2ee4332014-03-29 15:09:45 +00008216 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndn");
8217 }
Ivan A. Kosarev8264bb82018-07-23 13:26:37 +00008218 case NEON::BI__builtin_neon_vrndns_f32: {
8219 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8220 Int = Intrinsic::aarch64_neon_frintn;
8221 return EmitNeonCall(CGM.getIntrinsic(Int, FloatTy), Ops, "vrndn");
8222 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008223 case NEON::BI__builtin_neon_vrndph_f16: {
8224 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8225 Int = Intrinsic::ceil;
8226 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrndp");
8227 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008228 case NEON::BI__builtin_neon_vrndp_v:
8229 case NEON::BI__builtin_neon_vrndpq_v: {
8230 Int = Intrinsic::ceil;
8231 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndp");
8232 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008233 case NEON::BI__builtin_neon_vrndxh_f16: {
8234 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8235 Int = Intrinsic::rint;
8236 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrndx");
8237 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008238 case NEON::BI__builtin_neon_vrndx_v:
8239 case NEON::BI__builtin_neon_vrndxq_v: {
8240 Int = Intrinsic::rint;
8241 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndx");
8242 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008243 case NEON::BI__builtin_neon_vrndh_f16: {
8244 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8245 Int = Intrinsic::trunc;
8246 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vrndz");
8247 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008248 case NEON::BI__builtin_neon_vrnd_v:
8249 case NEON::BI__builtin_neon_vrndq_v: {
8250 Int = Intrinsic::trunc;
8251 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndz");
8252 }
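  // The vrnd* cases above map onto generic LLVM rounding intrinsics where one
  // exists: vrnda -> llvm.round (ties away from zero), vrndm -> llvm.floor,
  // vrndp -> llvm.ceil, vrndx -> llvm.rint, vrnd/vrndz -> llvm.trunc, and
  // vrndi -> llvm.nearbyint. Only vrndn (round to nearest, ties to even) needs
  // the target-specific aarch64.neon.frintn intrinsic.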
Tim Northovera2ee4332014-03-29 15:09:45 +00008253 case NEON::BI__builtin_neon_vcvt_f64_v:
8254 case NEON::BI__builtin_neon_vcvtq_f64_v:
8255 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
Sjoerd Meijer87793e72018-03-19 13:22:49 +00008256 Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float64, false, quad));
Tim Northovera2ee4332014-03-29 15:09:45 +00008257 return usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
8258 : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
8259 case NEON::BI__builtin_neon_vcvt_f64_f32: {
8260 assert(Type.getEltType() == NeonTypeFlags::Float64 && quad &&
8261 "unexpected vcvt_f64_f32 builtin");
8262 NeonTypeFlags SrcFlag = NeonTypeFlags(NeonTypeFlags::Float32, false, false);
Sjoerd Meijer87793e72018-03-19 13:22:49 +00008263 Ops[0] = Builder.CreateBitCast(Ops[0], GetNeonType(this, SrcFlag));
Tim Northovera2ee4332014-03-29 15:09:45 +00008264
8265 return Builder.CreateFPExt(Ops[0], Ty, "vcvt");
8266 }
8267 case NEON::BI__builtin_neon_vcvt_f32_f64: {
8268 assert(Type.getEltType() == NeonTypeFlags::Float32 &&
8269 "unexpected vcvt_f32_f64 builtin");
8270 NeonTypeFlags SrcFlag = NeonTypeFlags(NeonTypeFlags::Float64, false, true);
Sjoerd Meijer87793e72018-03-19 13:22:49 +00008271 Ops[0] = Builder.CreateBitCast(Ops[0], GetNeonType(this, SrcFlag));
Tim Northovera2ee4332014-03-29 15:09:45 +00008272
8273 return Builder.CreateFPTrunc(Ops[0], Ty, "vcvt");
8274 }
8275 case NEON::BI__builtin_neon_vcvt_s32_v:
8276 case NEON::BI__builtin_neon_vcvt_u32_v:
8277 case NEON::BI__builtin_neon_vcvt_s64_v:
8278 case NEON::BI__builtin_neon_vcvt_u64_v:
Fangrui Song99337e22018-07-20 08:19:20 +00008279 case NEON::BI__builtin_neon_vcvt_s16_v:
8280 case NEON::BI__builtin_neon_vcvt_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008281 case NEON::BI__builtin_neon_vcvtq_s32_v:
8282 case NEON::BI__builtin_neon_vcvtq_u32_v:
8283 case NEON::BI__builtin_neon_vcvtq_s64_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008284 case NEON::BI__builtin_neon_vcvtq_u64_v:
Fangrui Song99337e22018-07-20 08:19:20 +00008285 case NEON::BI__builtin_neon_vcvtq_s16_v:
8286 case NEON::BI__builtin_neon_vcvtq_u16_v: {
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00008287 Ops[0] = Builder.CreateBitCast(Ops[0], GetFloatNeonType(this, Type));
Tim Northovera2ee4332014-03-29 15:09:45 +00008288 if (usgn)
8289 return Builder.CreateFPToUI(Ops[0], Ty);
8290 return Builder.CreateFPToSI(Ops[0], Ty);
8291 }
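  // The plain vcvt*_s*/u* conversions above need no target intrinsic; assuming
  // the usual arm_neon.h signature, a call such as
  //   int32x4_t r = vcvtq_s32_f32(a);
  // is roughly a bitcast of the operand to <4 x float> followed by
  //   fptosi <4 x float> %a to <4 x i32>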
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008292 case NEON::BI__builtin_neon_vcvta_s16_v:
Luke Geesonda2b2e82018-06-15 10:10:45 +00008293 case NEON::BI__builtin_neon_vcvta_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008294 case NEON::BI__builtin_neon_vcvta_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008295 case NEON::BI__builtin_neon_vcvtaq_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008296 case NEON::BI__builtin_neon_vcvtaq_s32_v:
8297 case NEON::BI__builtin_neon_vcvta_u32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008298 case NEON::BI__builtin_neon_vcvtaq_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008299 case NEON::BI__builtin_neon_vcvtaq_u32_v:
8300 case NEON::BI__builtin_neon_vcvta_s64_v:
8301 case NEON::BI__builtin_neon_vcvtaq_s64_v:
8302 case NEON::BI__builtin_neon_vcvta_u64_v:
8303 case NEON::BI__builtin_neon_vcvtaq_u64_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008304 Int = usgn ? Intrinsic::aarch64_neon_fcvtau : Intrinsic::aarch64_neon_fcvtas;
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00008305 llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
Tim Northovera2ee4332014-03-29 15:09:45 +00008306 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvta");
8307 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008308 case NEON::BI__builtin_neon_vcvtm_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008309 case NEON::BI__builtin_neon_vcvtm_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008310 case NEON::BI__builtin_neon_vcvtmq_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008311 case NEON::BI__builtin_neon_vcvtmq_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008312 case NEON::BI__builtin_neon_vcvtm_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008313 case NEON::BI__builtin_neon_vcvtm_u32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008314 case NEON::BI__builtin_neon_vcvtmq_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008315 case NEON::BI__builtin_neon_vcvtmq_u32_v:
8316 case NEON::BI__builtin_neon_vcvtm_s64_v:
8317 case NEON::BI__builtin_neon_vcvtmq_s64_v:
8318 case NEON::BI__builtin_neon_vcvtm_u64_v:
8319 case NEON::BI__builtin_neon_vcvtmq_u64_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008320 Int = usgn ? Intrinsic::aarch64_neon_fcvtmu : Intrinsic::aarch64_neon_fcvtms;
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00008321 llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
Tim Northovera2ee4332014-03-29 15:09:45 +00008322 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtm");
8323 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008324 case NEON::BI__builtin_neon_vcvtn_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008325 case NEON::BI__builtin_neon_vcvtn_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008326 case NEON::BI__builtin_neon_vcvtnq_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008327 case NEON::BI__builtin_neon_vcvtnq_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008328 case NEON::BI__builtin_neon_vcvtn_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008329 case NEON::BI__builtin_neon_vcvtn_u32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008330 case NEON::BI__builtin_neon_vcvtnq_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008331 case NEON::BI__builtin_neon_vcvtnq_u32_v:
8332 case NEON::BI__builtin_neon_vcvtn_s64_v:
8333 case NEON::BI__builtin_neon_vcvtnq_s64_v:
8334 case NEON::BI__builtin_neon_vcvtn_u64_v:
8335 case NEON::BI__builtin_neon_vcvtnq_u64_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008336 Int = usgn ? Intrinsic::aarch64_neon_fcvtnu : Intrinsic::aarch64_neon_fcvtns;
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00008337 llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
Tim Northovera2ee4332014-03-29 15:09:45 +00008338 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtn");
8339 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008340 case NEON::BI__builtin_neon_vcvtp_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008341 case NEON::BI__builtin_neon_vcvtp_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008342 case NEON::BI__builtin_neon_vcvtpq_s16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008343 case NEON::BI__builtin_neon_vcvtpq_s32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008344 case NEON::BI__builtin_neon_vcvtp_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008345 case NEON::BI__builtin_neon_vcvtp_u32_v:
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008346 case NEON::BI__builtin_neon_vcvtpq_u16_v:
Tim Northovera2ee4332014-03-29 15:09:45 +00008347 case NEON::BI__builtin_neon_vcvtpq_u32_v:
8348 case NEON::BI__builtin_neon_vcvtp_s64_v:
8349 case NEON::BI__builtin_neon_vcvtpq_s64_v:
8350 case NEON::BI__builtin_neon_vcvtp_u64_v:
8351 case NEON::BI__builtin_neon_vcvtpq_u64_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008352 Int = usgn ? Intrinsic::aarch64_neon_fcvtpu : Intrinsic::aarch64_neon_fcvtps;
Ahmed Bougacha774b5e22015-08-24 23:41:31 +00008353 llvm::Type *Tys[2] = { Ty, GetFloatNeonType(this, Type) };
Tim Northovera2ee4332014-03-29 15:09:45 +00008354 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtp");
8355 }
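  // The vcvta/vcvtm/vcvtn/vcvtp converts above encode an explicit rounding mode
  // in the instruction rather than in IR: fcvtas/fcvtau round to nearest with
  // ties away from zero (vcvta*), fcvtms/fcvtmu round toward minus infinity
  // (vcvtm*), fcvtns/fcvtnu round to nearest with ties to even (vcvtn*), and
  // fcvtps/fcvtpu round toward plus infinity (vcvtp*).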
8356 case NEON::BI__builtin_neon_vmulx_v:
8357 case NEON::BI__builtin_neon_vmulxq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008358 Int = Intrinsic::aarch64_neon_fmulx;
Tim Northovera2ee4332014-03-29 15:09:45 +00008359 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmulx");
8360 }
Abderrazek Zaafrani585051a2018-03-20 20:37:31 +00008361 case NEON::BI__builtin_neon_vmulxh_lane_f16:
8362 case NEON::BI__builtin_neon_vmulxh_laneq_f16: {
8363 // vmulx_lane should be mapped to the Neon scalar fmulx after
8364 // extracting the scalar element.
8365 Ops.push_back(EmitScalarExpr(E->getArg(2)));
8366 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2], "extract");
8367 Ops.pop_back();
8368 Int = Intrinsic::aarch64_neon_fmulx;
8369 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vmulx");
8370 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008371 case NEON::BI__builtin_neon_vmul_lane_v:
8372 case NEON::BI__builtin_neon_vmul_laneq_v: {
8373 // v1f64 vmul_lane should be mapped to Neon scalar mul lane
8374 bool Quad = false;
8375 if (BuiltinID == NEON::BI__builtin_neon_vmul_laneq_v)
8376 Quad = true;
8377 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
8378 llvm::Type *VTy = GetNeonType(this,
Sjoerd Meijer87793e72018-03-19 13:22:49 +00008379 NeonTypeFlags(NeonTypeFlags::Float64, false, Quad));
Tim Northovera2ee4332014-03-29 15:09:45 +00008380 Ops[1] = Builder.CreateBitCast(Ops[1], VTy);
8381 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2], "extract");
8382 Value *Result = Builder.CreateFMul(Ops[0], Ops[1]);
8383 return Builder.CreateBitCast(Result, Ty);
8384 }
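  // For the v1f64 forms above there is no vector multiply to use, so the code
  // extracts the requested lane, multiplies as a scalar double, and bitcasts
  // the result back to the one-element vector type.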
Tim Northover0c68faa2014-03-31 15:47:09 +00008385 case NEON::BI__builtin_neon_vnegd_s64:
8386 return Builder.CreateNeg(EmitScalarExpr(E->getArg(0)), "vnegd");
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008387 case NEON::BI__builtin_neon_vnegh_f16:
8388 return Builder.CreateFNeg(EmitScalarExpr(E->getArg(0)), "vnegh");
Tim Northovera2ee4332014-03-29 15:09:45 +00008389 case NEON::BI__builtin_neon_vpmaxnm_v:
8390 case NEON::BI__builtin_neon_vpmaxnmq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008391 Int = Intrinsic::aarch64_neon_fmaxnmp;
Tim Northovera2ee4332014-03-29 15:09:45 +00008392 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmaxnm");
8393 }
8394 case NEON::BI__builtin_neon_vpminnm_v:
8395 case NEON::BI__builtin_neon_vpminnmq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008396 Int = Intrinsic::aarch64_neon_fminnmp;
Tim Northovera2ee4332014-03-29 15:09:45 +00008397 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpminnm");
8398 }
Abderrazek Zaafranice8746d2018-01-19 23:11:18 +00008399 case NEON::BI__builtin_neon_vsqrth_f16: {
8400 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8401 Int = Intrinsic::sqrt;
8402 return EmitNeonCall(CGM.getIntrinsic(Int, HalfTy), Ops, "vsqrt");
8403 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008404 case NEON::BI__builtin_neon_vsqrt_v:
8405 case NEON::BI__builtin_neon_vsqrtq_v: {
8406 Int = Intrinsic::sqrt;
8407 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
8408 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vsqrt");
8409 }
8410 case NEON::BI__builtin_neon_vrbit_v:
8411 case NEON::BI__builtin_neon_vrbitq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008412 Int = Intrinsic::aarch64_neon_rbit;
Tim Northovera2ee4332014-03-29 15:09:45 +00008413 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrbit");
8414 }
8415 case NEON::BI__builtin_neon_vaddv_u8:
8416 // FIXME: These are handled by the AArch64 scalar code.
8417 usgn = true;
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00008418 LLVM_FALLTHROUGH;
Tim Northovera2ee4332014-03-29 15:09:45 +00008419 case NEON::BI__builtin_neon_vaddv_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008420 Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008421 Ty = Int32Ty;
8422 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008423 llvm::Type *Tys[2] = { Ty, VTy };
8424 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8425 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008426 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008427 }
8428 case NEON::BI__builtin_neon_vaddv_u16:
8429 usgn = true;
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00008430 LLVM_FALLTHROUGH;
Tim Northovera2ee4332014-03-29 15:09:45 +00008431 case NEON::BI__builtin_neon_vaddv_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008432 Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008433 Ty = Int32Ty;
8434 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008435 llvm::Type *Tys[2] = { Ty, VTy };
8436 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8437 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008438 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008439 }
8440 case NEON::BI__builtin_neon_vaddvq_u8:
8441 usgn = true;
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00008442 LLVM_FALLTHROUGH;
Tim Northovera2ee4332014-03-29 15:09:45 +00008443 case NEON::BI__builtin_neon_vaddvq_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008444 Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008445 Ty = Int32Ty;
8446 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008447 llvm::Type *Tys[2] = { Ty, VTy };
8448 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8449 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008450 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008451 }
8452 case NEON::BI__builtin_neon_vaddvq_u16:
8453 usgn = true;
Adrian Prantlf3b3ccd2017-12-19 22:06:11 +00008454 LLVM_FALLTHROUGH;
Tim Northovera2ee4332014-03-29 15:09:45 +00008455 case NEON::BI__builtin_neon_vaddvq_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008456 Int = usgn ? Intrinsic::aarch64_neon_uaddv : Intrinsic::aarch64_neon_saddv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008457 Ty = Int32Ty;
8458 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008459 llvm::Type *Tys[2] = { Ty, VTy };
8460 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8461 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008462 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008463 }
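  // The across-vector reductions (vaddv above, and the vmaxv/vminv family
  // below) all follow one shape: the (u|s)addv / (u|s)maxv / (u|s)minv
  // intrinsic is emitted with an i32 result and the value is truncated back to
  // the element width. Assuming the usual arm_neon.h signature,
  //   uint8_t r = vaddv_u8(a);   // a: uint8x8_t
  // becomes roughly
  //   %s = call i32 @llvm.aarch64.neon.uaddv.i32.v8i8(<8 x i8> %a)
  //   trunc i32 %s to i8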
8464 case NEON::BI__builtin_neon_vmaxv_u8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008465 Int = Intrinsic::aarch64_neon_umaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008466 Ty = Int32Ty;
8467 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008468 llvm::Type *Tys[2] = { Ty, VTy };
8469 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8470 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008471 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008472 }
8473 case NEON::BI__builtin_neon_vmaxv_u16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008474 Int = Intrinsic::aarch64_neon_umaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008475 Ty = Int32Ty;
8476 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008477 llvm::Type *Tys[2] = { Ty, VTy };
8478 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8479 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008480 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008481 }
8482 case NEON::BI__builtin_neon_vmaxvq_u8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008483 Int = Intrinsic::aarch64_neon_umaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008484 Ty = Int32Ty;
8485 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008486 llvm::Type *Tys[2] = { Ty, VTy };
8487 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8488 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008489 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008490 }
8491 case NEON::BI__builtin_neon_vmaxvq_u16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008492 Int = Intrinsic::aarch64_neon_umaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008493 Ty = Int32Ty;
8494 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008495 llvm::Type *Tys[2] = { Ty, VTy };
8496 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8497 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008498 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008499 }
8500 case NEON::BI__builtin_neon_vmaxv_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008501 Int = Intrinsic::aarch64_neon_smaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008502 Ty = Int32Ty;
8503 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008504 llvm::Type *Tys[2] = { Ty, VTy };
8505 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8506 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008507 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008508 }
8509 case NEON::BI__builtin_neon_vmaxv_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008510 Int = Intrinsic::aarch64_neon_smaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008511 Ty = Int32Ty;
8512 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008513 llvm::Type *Tys[2] = { Ty, VTy };
8514 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8515 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008516 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008517 }
8518 case NEON::BI__builtin_neon_vmaxvq_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008519 Int = Intrinsic::aarch64_neon_smaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008520 Ty = Int32Ty;
8521 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008522 llvm::Type *Tys[2] = { Ty, VTy };
8523 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8524 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008525 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008526 }
8527 case NEON::BI__builtin_neon_vmaxvq_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008528 Int = Intrinsic::aarch64_neon_smaxv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008529 Ty = Int32Ty;
8530 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008531 llvm::Type *Tys[2] = { Ty, VTy };
8532 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8533 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008534 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008535 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008536 case NEON::BI__builtin_neon_vmaxv_f16: {
8537 Int = Intrinsic::aarch64_neon_fmaxv;
8538 Ty = HalfTy;
8539 VTy = llvm::VectorType::get(HalfTy, 4);
8540 llvm::Type *Tys[2] = { Ty, VTy };
8541 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8542 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
8543 return Builder.CreateTrunc(Ops[0], HalfTy);
8544 }
8545 case NEON::BI__builtin_neon_vmaxvq_f16: {
8546 Int = Intrinsic::aarch64_neon_fmaxv;
8547 Ty = HalfTy;
8548 VTy = llvm::VectorType::get(HalfTy, 8);
8549 llvm::Type *Tys[2] = { Ty, VTy };
8550 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8551 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv");
8552 return Builder.CreateTrunc(Ops[0], HalfTy);
8553 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008554 case NEON::BI__builtin_neon_vminv_u8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008555 Int = Intrinsic::aarch64_neon_uminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008556 Ty = Int32Ty;
8557 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008558 llvm::Type *Tys[2] = { Ty, VTy };
8559 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8560 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008561 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008562 }
8563 case NEON::BI__builtin_neon_vminv_u16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008564 Int = Intrinsic::aarch64_neon_uminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008565 Ty = Int32Ty;
8566 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008567 llvm::Type *Tys[2] = { Ty, VTy };
8568 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8569 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008570 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008571 }
8572 case NEON::BI__builtin_neon_vminvq_u8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008573 Int = Intrinsic::aarch64_neon_uminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008574 Ty = Int32Ty;
8575 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008576 llvm::Type *Tys[2] = { Ty, VTy };
8577 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8578 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008579 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008580 }
8581 case NEON::BI__builtin_neon_vminvq_u16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008582 Int = Intrinsic::aarch64_neon_uminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008583 Ty = Int32Ty;
8584 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008585 llvm::Type *Tys[2] = { Ty, VTy };
8586 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8587 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008588 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008589 }
8590 case NEON::BI__builtin_neon_vminv_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008591 Int = Intrinsic::aarch64_neon_sminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008592 Ty = Int32Ty;
8593 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008594 llvm::Type *Tys[2] = { Ty, VTy };
8595 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8596 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008597 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008598 }
8599 case NEON::BI__builtin_neon_vminv_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008600 Int = Intrinsic::aarch64_neon_sminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008601 Ty = Int32Ty;
8602 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008603 llvm::Type *Tys[2] = { Ty, VTy };
8604 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8605 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008606 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008607 }
8608 case NEON::BI__builtin_neon_vminvq_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008609 Int = Intrinsic::aarch64_neon_sminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008610 Ty = Int32Ty;
8611 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008612 llvm::Type *Tys[2] = { Ty, VTy };
8613 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8614 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008615 return Builder.CreateTrunc(Ops[0], Int8Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008616 }
8617 case NEON::BI__builtin_neon_vminvq_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008618 Int = Intrinsic::aarch64_neon_sminv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008619 Ty = Int32Ty;
8620 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008621 llvm::Type *Tys[2] = { Ty, VTy };
8622 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8623 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008624 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008625 }
Abderrazek Zaafranif58a1322017-12-21 19:20:01 +00008626 case NEON::BI__builtin_neon_vminv_f16: {
8627 Int = Intrinsic::aarch64_neon_fminv;
8628 Ty = HalfTy;
8629 VTy = llvm::VectorType::get(HalfTy, 4);
8630 llvm::Type *Tys[2] = { Ty, VTy };
8631 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8632 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
8633 return Builder.CreateTrunc(Ops[0], HalfTy);
8634 }
8635 case NEON::BI__builtin_neon_vminvq_f16: {
8636 Int = Intrinsic::aarch64_neon_fminv;
8637 Ty = HalfTy;
8638 VTy = llvm::VectorType::get(HalfTy, 8);
8639 llvm::Type *Tys[2] = { Ty, VTy };
8640 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8641 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv");
8642 return Builder.CreateTrunc(Ops[0], HalfTy);
8643 }
8644 case NEON::BI__builtin_neon_vmaxnmv_f16: {
8645 Int = Intrinsic::aarch64_neon_fmaxnmv;
8646 Ty = HalfTy;
8647 VTy = llvm::VectorType::get(HalfTy, 4);
8648 llvm::Type *Tys[2] = { Ty, VTy };
8649 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8650 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxnmv");
8651 return Builder.CreateTrunc(Ops[0], HalfTy);
8652 }
8653 case NEON::BI__builtin_neon_vmaxnmvq_f16: {
8654 Int = Intrinsic::aarch64_neon_fmaxnmv;
8655 Ty = HalfTy;
8656 VTy = llvm::VectorType::get(HalfTy, 8);
8657 llvm::Type *Tys[2] = { Ty, VTy };
8658 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8659 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxnmv");
8660 return Builder.CreateTrunc(Ops[0], HalfTy);
8661 }
8662 case NEON::BI__builtin_neon_vminnmv_f16: {
8663 Int = Intrinsic::aarch64_neon_fminnmv;
8664 Ty = HalfTy;
8665 VTy = llvm::VectorType::get(HalfTy, 4);
8666 llvm::Type *Tys[2] = { Ty, VTy };
8667 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8668 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminnmv");
8669 return Builder.CreateTrunc(Ops[0], HalfTy);
8670 }
8671 case NEON::BI__builtin_neon_vminnmvq_f16: {
8672 Int = Intrinsic::aarch64_neon_fminnmv;
8673 Ty = HalfTy;
8674 VTy = llvm::VectorType::get(HalfTy, 8);
8675 llvm::Type *Tys[2] = { Ty, VTy };
8676 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8677 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminnmv");
8678 return Builder.CreateTrunc(Ops[0], HalfTy);
8679 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008680 case NEON::BI__builtin_neon_vmul_n_f64: {
8681 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy);
8682 Value *RHS = Builder.CreateBitCast(EmitScalarExpr(E->getArg(1)), DoubleTy);
8683 return Builder.CreateFMul(Ops[0], RHS);
8684 }
8685 case NEON::BI__builtin_neon_vaddlv_u8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008686 Int = Intrinsic::aarch64_neon_uaddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008687 Ty = Int32Ty;
8688 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008689 llvm::Type *Tys[2] = { Ty, VTy };
8690 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8691 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008692 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008693 }
8694 case NEON::BI__builtin_neon_vaddlv_u16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008695 Int = Intrinsic::aarch64_neon_uaddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008696 Ty = Int32Ty;
8697 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008698 llvm::Type *Tys[2] = { Ty, VTy };
8699 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8700 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
8701 }
8702 case NEON::BI__builtin_neon_vaddlvq_u8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008703 Int = Intrinsic::aarch64_neon_uaddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008704 Ty = Int32Ty;
8705 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008706 llvm::Type *Tys[2] = { Ty, VTy };
8707 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8708 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008709 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008710 }
8711 case NEON::BI__builtin_neon_vaddlvq_u16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008712 Int = Intrinsic::aarch64_neon_uaddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008713 Ty = Int32Ty;
8714 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008715 llvm::Type *Tys[2] = { Ty, VTy };
8716 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8717 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
8718 }
8719 case NEON::BI__builtin_neon_vaddlv_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008720 Int = Intrinsic::aarch64_neon_saddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008721 Ty = Int32Ty;
8722 VTy = llvm::VectorType::get(Int8Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008723 llvm::Type *Tys[2] = { Ty, VTy };
8724 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8725 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008726 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008727 }
8728 case NEON::BI__builtin_neon_vaddlv_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008729 Int = Intrinsic::aarch64_neon_saddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008730 Ty = Int32Ty;
8731 VTy = llvm::VectorType::get(Int16Ty, 4);
Tim Northovera2ee4332014-03-29 15:09:45 +00008732 llvm::Type *Tys[2] = { Ty, VTy };
8733 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8734 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
8735 }
8736 case NEON::BI__builtin_neon_vaddlvq_s8: {
Tim Northover573cbee2014-05-24 12:52:07 +00008737 Int = Intrinsic::aarch64_neon_saddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008738 Ty = Int32Ty;
8739 VTy = llvm::VectorType::get(Int8Ty, 16);
Tim Northovera2ee4332014-03-29 15:09:45 +00008740 llvm::Type *Tys[2] = { Ty, VTy };
8741 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8742 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
Benjamin Kramerc385a802015-07-28 15:40:11 +00008743 return Builder.CreateTrunc(Ops[0], Int16Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008744 }
8745 case NEON::BI__builtin_neon_vaddlvq_s16: {
Tim Northover573cbee2014-05-24 12:52:07 +00008746 Int = Intrinsic::aarch64_neon_saddlv;
Benjamin Kramerc385a802015-07-28 15:40:11 +00008747 Ty = Int32Ty;
8748 VTy = llvm::VectorType::get(Int16Ty, 8);
Tim Northovera2ee4332014-03-29 15:09:45 +00008749 llvm::Type *Tys[2] = { Ty, VTy };
8750 Ops.push_back(EmitScalarExpr(E->getArg(0)));
8751 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv");
8752 }
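  // vaddlv* widens while reducing, so the result element is twice the source
  // width: the 8-bit variants above truncate the i32 intrinsic result to i16,
  // while the 16-bit variants already want the i32 the intrinsic returns and
  // pass it through unchanged.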
8753 case NEON::BI__builtin_neon_vsri_n_v:
8754 case NEON::BI__builtin_neon_vsriq_n_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008755 Int = Intrinsic::aarch64_neon_vsri;
Tim Northovera2ee4332014-03-29 15:09:45 +00008756 llvm::Function *Intrin = CGM.getIntrinsic(Int, Ty);
8757 return EmitNeonCall(Intrin, Ops, "vsri_n");
8758 }
8759 case NEON::BI__builtin_neon_vsli_n_v:
8760 case NEON::BI__builtin_neon_vsliq_n_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008761 Int = Intrinsic::aarch64_neon_vsli;
Tim Northovera2ee4332014-03-29 15:09:45 +00008762 llvm::Function *Intrin = CGM.getIntrinsic(Int, Ty);
8763 return EmitNeonCall(Intrin, Ops, "vsli_n");
8764 }
8765 case NEON::BI__builtin_neon_vsra_n_v:
8766 case NEON::BI__builtin_neon_vsraq_n_v:
8767 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
8768 Ops[1] = EmitNeonRShiftImm(Ops[1], Ops[2], Ty, usgn, "vsra_n");
8769 return Builder.CreateAdd(Ops[0], Ops[1]);
8770 case NEON::BI__builtin_neon_vrsra_n_v:
8771 case NEON::BI__builtin_neon_vrsraq_n_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00008772 Int = usgn ? Intrinsic::aarch64_neon_urshl : Intrinsic::aarch64_neon_srshl;
Tim Northovera2ee4332014-03-29 15:09:45 +00008773 SmallVector<llvm::Value*,2> TmpOps;
8774 TmpOps.push_back(Ops[1]);
8775 TmpOps.push_back(Ops[2]);
8776 Function* F = CGM.getIntrinsic(Int, Ty);
8777 llvm::Value *tmp = EmitNeonCall(F, TmpOps, "vrshr_n", 1, true);
8778 Ops[0] = Builder.CreateBitCast(Ops[0], VTy);
8779 return Builder.CreateAdd(Ops[0], tmp);
8780 }
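  // The accumulating shifts above are split in two: vsra_n is open-coded as a
  // right shift (EmitNeonRShiftImm picks lshr or ashr from usgn) followed by an
  // add, while vrsra_n gets its rounding from the (u|s)rshl intrinsic applied
  // to the shift operands before the accumulator is added.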
Tim Northovera2ee4332014-03-29 15:09:45 +00008781 case NEON::BI__builtin_neon_vld1_v:
Peter Collingbourneb367c562016-11-28 22:30:21 +00008782 case NEON::BI__builtin_neon_vld1q_v: {
Tim Northovera2ee4332014-03-29 15:09:45 +00008783 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(VTy));
Peter Collingbourneb367c562016-11-28 22:30:21 +00008784 auto Alignment = CharUnits::fromQuantity(
8785 BuiltinID == NEON::BI__builtin_neon_vld1_v ? 8 : 16);
8786 return Builder.CreateAlignedLoad(VTy, Ops[0], Alignment);
8787 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008788 case NEON::BI__builtin_neon_vst1_v:
8789 case NEON::BI__builtin_neon_vst1q_v:
8790 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(VTy));
8791 Ops[1] = Builder.CreateBitCast(Ops[1], VTy);
John McCall7f416cc2015-09-08 08:05:57 +00008792 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008793 case NEON::BI__builtin_neon_vld1_lane_v:
Peter Collingbourneb367c562016-11-28 22:30:21 +00008794 case NEON::BI__builtin_neon_vld1q_lane_v: {
Tim Northovera2ee4332014-03-29 15:09:45 +00008795 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8796 Ty = llvm::PointerType::getUnqual(VTy->getElementType());
8797 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
Peter Collingbourneb367c562016-11-28 22:30:21 +00008798 auto Alignment = CharUnits::fromQuantity(
8799 BuiltinID == NEON::BI__builtin_neon_vld1_lane_v ? 8 : 16);
8800 Ops[0] =
8801 Builder.CreateAlignedLoad(VTy->getElementType(), Ops[0], Alignment);
Tim Northovera2ee4332014-03-29 15:09:45 +00008802 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vld1_lane");
Peter Collingbourneb367c562016-11-28 22:30:21 +00008803 }
Tim Northovera2ee4332014-03-29 15:09:45 +00008804 case NEON::BI__builtin_neon_vld1_dup_v:
8805 case NEON::BI__builtin_neon_vld1q_dup_v: {
8806 Value *V = UndefValue::get(Ty);
8807 Ty = llvm::PointerType::getUnqual(VTy->getElementType());
8808 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
Peter Collingbourneb367c562016-11-28 22:30:21 +00008809 auto Alignment = CharUnits::fromQuantity(
8810 BuiltinID == NEON::BI__builtin_neon_vld1_dup_v ? 8 : 16);
8811 Ops[0] =
8812 Builder.CreateAlignedLoad(VTy->getElementType(), Ops[0], Alignment);
Michael J. Spencer5ce26682014-06-02 19:48:59 +00008813 llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
Tim Northovera2ee4332014-03-29 15:09:45 +00008814 Ops[0] = Builder.CreateInsertElement(V, Ops[0], CI);
8815 return EmitNeonSplat(Ops[0], CI);
8816 }
8817 case NEON::BI__builtin_neon_vst1_lane_v:
8818 case NEON::BI__builtin_neon_vst1q_lane_v:
8819 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8820 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
8821 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
John McCall7f416cc2015-09-08 08:05:57 +00008822 return Builder.CreateDefaultAlignedStore(Ops[1],
8823 Builder.CreateBitCast(Ops[0], Ty));
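  // vld1/vst1 and their lane/dup forms above need no intrinsic: they are
  // ordinary loads and stores, with the full-vector loads emitted at 8-byte
  // alignment for the 64-bit forms and 16-byte alignment for the 128-bit
  // forms. vld1_lane inserts the loaded element into the existing vector, and
  // vld1_dup splats it into every lane via EmitNeonSplat.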
Tim Northovera2ee4332014-03-29 15:09:45 +00008824 case NEON::BI__builtin_neon_vld2_v:
8825 case NEON::BI__builtin_neon_vld2q_v: {
8826 llvm::Type *PTy = llvm::PointerType::getUnqual(VTy);
8827 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
8828 llvm::Type *Tys[2] = { VTy, PTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008829 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld2, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008830 Ops[1] = Builder.CreateCall(F, Ops[1], "vld2");
8831 Ops[0] = Builder.CreateBitCast(Ops[0],
8832 llvm::PointerType::getUnqual(Ops[1]->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00008833 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008834 }
8835 case NEON::BI__builtin_neon_vld3_v:
8836 case NEON::BI__builtin_neon_vld3q_v: {
8837 llvm::Type *PTy = llvm::PointerType::getUnqual(VTy);
8838 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
8839 llvm::Type *Tys[2] = { VTy, PTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008840 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld3, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008841 Ops[1] = Builder.CreateCall(F, Ops[1], "vld3");
8842 Ops[0] = Builder.CreateBitCast(Ops[0],
8843 llvm::PointerType::getUnqual(Ops[1]->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00008844 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008845 }
8846 case NEON::BI__builtin_neon_vld4_v:
8847 case NEON::BI__builtin_neon_vld4q_v: {
8848 llvm::Type *PTy = llvm::PointerType::getUnqual(VTy);
8849 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
8850 llvm::Type *Tys[2] = { VTy, PTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008851 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld4, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008852 Ops[1] = Builder.CreateCall(F, Ops[1], "vld4");
8853 Ops[0] = Builder.CreateBitCast(Ops[0],
8854 llvm::PointerType::getUnqual(Ops[1]->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00008855 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008856 }
Tim Northover74b2def2014-04-01 10:37:47 +00008857 case NEON::BI__builtin_neon_vld2_dup_v:
8858 case NEON::BI__builtin_neon_vld2q_dup_v: {
Tim Northovera2ee4332014-03-29 15:09:45 +00008859 llvm::Type *PTy =
8860 llvm::PointerType::getUnqual(VTy->getElementType());
8861 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
8862 llvm::Type *Tys[2] = { VTy, PTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008863 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld2r, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008864 Ops[1] = Builder.CreateCall(F, Ops[1], "vld2");
8865 Ops[0] = Builder.CreateBitCast(Ops[0],
8866 llvm::PointerType::getUnqual(Ops[1]->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00008867 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008868 }
Tim Northover74b2def2014-04-01 10:37:47 +00008869 case NEON::BI__builtin_neon_vld3_dup_v:
8870 case NEON::BI__builtin_neon_vld3q_dup_v: {
Tim Northovera2ee4332014-03-29 15:09:45 +00008871 llvm::Type *PTy =
8872 llvm::PointerType::getUnqual(VTy->getElementType());
8873 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
8874 llvm::Type *Tys[2] = { VTy, PTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008875 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld3r, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008876 Ops[1] = Builder.CreateCall(F, Ops[1], "vld3");
8877 Ops[0] = Builder.CreateBitCast(Ops[0],
8878 llvm::PointerType::getUnqual(Ops[1]->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00008879 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008880 }
Tim Northover74b2def2014-04-01 10:37:47 +00008881 case NEON::BI__builtin_neon_vld4_dup_v:
8882 case NEON::BI__builtin_neon_vld4q_dup_v: {
Tim Northovera2ee4332014-03-29 15:09:45 +00008883 llvm::Type *PTy =
8884 llvm::PointerType::getUnqual(VTy->getElementType());
8885 Ops[1] = Builder.CreateBitCast(Ops[1], PTy);
8886 llvm::Type *Tys[2] = { VTy, PTy };
Tim Northover573cbee2014-05-24 12:52:07 +00008887 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld4r, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008888 Ops[1] = Builder.CreateCall(F, Ops[1], "vld4");
8889 Ops[0] = Builder.CreateBitCast(Ops[0],
8890 llvm::PointerType::getUnqual(Ops[1]->getType()));
John McCall7f416cc2015-09-08 08:05:57 +00008891 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008892 }
8893 case NEON::BI__builtin_neon_vld2_lane_v:
8894 case NEON::BI__builtin_neon_vld2q_lane_v: {
8895 llvm::Type *Tys[2] = { VTy, Ops[1]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008896 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld2lane, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008897 Ops.push_back(Ops[1]);
8898 Ops.erase(Ops.begin()+1);
8899 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8900 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Benjamin Kramerc385a802015-07-28 15:40:11 +00008901 Ops[3] = Builder.CreateZExt(Ops[3], Int64Ty);
Craig Topper5fc8fc22014-08-27 06:28:36 +00008902 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld2_lane");
Tim Northovera2ee4332014-03-29 15:09:45 +00008903 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
8904 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
John McCall7f416cc2015-09-08 08:05:57 +00008905 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008906 }
8907 case NEON::BI__builtin_neon_vld3_lane_v:
8908 case NEON::BI__builtin_neon_vld3q_lane_v: {
8909 llvm::Type *Tys[2] = { VTy, Ops[1]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008910 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld3lane, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008911 Ops.push_back(Ops[1]);
8912 Ops.erase(Ops.begin()+1);
8913 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8914 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
8915 Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
Benjamin Kramerc385a802015-07-28 15:40:11 +00008916 Ops[4] = Builder.CreateZExt(Ops[4], Int64Ty);
Craig Topper5fc8fc22014-08-27 06:28:36 +00008917 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
Tim Northovera2ee4332014-03-29 15:09:45 +00008918 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
8919 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
John McCall7f416cc2015-09-08 08:05:57 +00008920 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008921 }
8922 case NEON::BI__builtin_neon_vld4_lane_v:
8923 case NEON::BI__builtin_neon_vld4q_lane_v: {
8924 llvm::Type *Tys[2] = { VTy, Ops[1]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008925 Function *F = CGM.getIntrinsic(Intrinsic::aarch64_neon_ld4lane, Tys);
Tim Northovera2ee4332014-03-29 15:09:45 +00008926 Ops.push_back(Ops[1]);
8927 Ops.erase(Ops.begin()+1);
8928 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8929 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
8930 Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
8931 Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
Benjamin Kramerc385a802015-07-28 15:40:11 +00008932 Ops[5] = Builder.CreateZExt(Ops[5], Int64Ty);
Craig Topper5fc8fc22014-08-27 06:28:36 +00008933 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld4_lane");
Tim Northovera2ee4332014-03-29 15:09:45 +00008934 Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
8935 Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
John McCall7f416cc2015-09-08 08:05:57 +00008936 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Tim Northovera2ee4332014-03-29 15:09:45 +00008937 }
8938 case NEON::BI__builtin_neon_vst2_v:
8939 case NEON::BI__builtin_neon_vst2q_v: {
8940 Ops.push_back(Ops[0]);
8941 Ops.erase(Ops.begin());
8942 llvm::Type *Tys[2] = { VTy, Ops[2]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008943 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st2, Tys),
Tim Northovera2ee4332014-03-29 15:09:45 +00008944 Ops, "");
8945 }
8946 case NEON::BI__builtin_neon_vst2_lane_v:
8947 case NEON::BI__builtin_neon_vst2q_lane_v: {
8948 Ops.push_back(Ops[0]);
8949 Ops.erase(Ops.begin());
Benjamin Kramerc385a802015-07-28 15:40:11 +00008950 Ops[2] = Builder.CreateZExt(Ops[2], Int64Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008951 llvm::Type *Tys[2] = { VTy, Ops[3]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008952 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st2lane, Tys),
Tim Northovera2ee4332014-03-29 15:09:45 +00008953 Ops, "");
8954 }
8955 case NEON::BI__builtin_neon_vst3_v:
8956 case NEON::BI__builtin_neon_vst3q_v: {
8957 Ops.push_back(Ops[0]);
8958 Ops.erase(Ops.begin());
8959 llvm::Type *Tys[2] = { VTy, Ops[3]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008960 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st3, Tys),
Tim Northovera2ee4332014-03-29 15:09:45 +00008961 Ops, "");
8962 }
8963 case NEON::BI__builtin_neon_vst3_lane_v:
8964 case NEON::BI__builtin_neon_vst3q_lane_v: {
8965 Ops.push_back(Ops[0]);
8966 Ops.erase(Ops.begin());
Benjamin Kramerc385a802015-07-28 15:40:11 +00008967 Ops[3] = Builder.CreateZExt(Ops[3], Int64Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008968 llvm::Type *Tys[2] = { VTy, Ops[4]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008969 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st3lane, Tys),
Tim Northovera2ee4332014-03-29 15:09:45 +00008970 Ops, "");
8971 }
8972 case NEON::BI__builtin_neon_vst4_v:
8973 case NEON::BI__builtin_neon_vst4q_v: {
8974 Ops.push_back(Ops[0]);
8975 Ops.erase(Ops.begin());
8976 llvm::Type *Tys[2] = { VTy, Ops[4]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008977 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st4, Tys),
Tim Northovera2ee4332014-03-29 15:09:45 +00008978 Ops, "");
8979 }
8980 case NEON::BI__builtin_neon_vst4_lane_v:
8981 case NEON::BI__builtin_neon_vst4q_lane_v: {
8982 Ops.push_back(Ops[0]);
8983 Ops.erase(Ops.begin());
Benjamin Kramerc385a802015-07-28 15:40:11 +00008984 Ops[4] = Builder.CreateZExt(Ops[4], Int64Ty);
Tim Northovera2ee4332014-03-29 15:09:45 +00008985 llvm::Type *Tys[2] = { VTy, Ops[5]->getType() };
Tim Northover573cbee2014-05-24 12:52:07 +00008986 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_st4lane, Tys),
Tim Northovera2ee4332014-03-29 15:09:45 +00008987 Ops, "");
8988 }
8989 case NEON::BI__builtin_neon_vtrn_v:
8990 case NEON::BI__builtin_neon_vtrnq_v: {
8991 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
8992 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
8993 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Craig Topper8a13c412014-05-21 05:09:00 +00008994 Value *SV = nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00008995
8996 for (unsigned vi = 0; vi != 2; ++vi) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00008997 SmallVector<uint32_t, 16> Indices;
Tim Northovera2ee4332014-03-29 15:09:45 +00008998 for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
Craig Topper832caf02016-05-29 02:39:30 +00008999 Indices.push_back(i+vi);
9000 Indices.push_back(i+e+vi);
Tim Northovera2ee4332014-03-29 15:09:45 +00009001 }
David Blaikiefb901c7a2015-04-04 15:12:29 +00009002 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
Craig Topper832caf02016-05-29 02:39:30 +00009003 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], Indices, "vtrn");
John McCall7f416cc2015-09-08 08:05:57 +00009004 SV = Builder.CreateDefaultAlignedStore(SV, Addr);
Tim Northovera2ee4332014-03-29 15:09:45 +00009005 }
9006 return SV;
9007 }
9008 case NEON::BI__builtin_neon_vuzp_v:
9009 case NEON::BI__builtin_neon_vuzpq_v: {
9010 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
9011 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
9012 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Craig Topper8a13c412014-05-21 05:09:00 +00009013 Value *SV = nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00009014
9015 for (unsigned vi = 0; vi != 2; ++vi) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00009016 SmallVector<uint32_t, 16> Indices;
Tim Northovera2ee4332014-03-29 15:09:45 +00009017 for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
Craig Topper832caf02016-05-29 02:39:30 +00009018 Indices.push_back(2*i+vi);
Tim Northovera2ee4332014-03-29 15:09:45 +00009019
David Blaikiefb901c7a2015-04-04 15:12:29 +00009020 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
Craig Topper832caf02016-05-29 02:39:30 +00009021 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], Indices, "vuzp");
John McCall7f416cc2015-09-08 08:05:57 +00009022 SV = Builder.CreateDefaultAlignedStore(SV, Addr);
Tim Northovera2ee4332014-03-29 15:09:45 +00009023 }
9024 return SV;
9025 }
9026 case NEON::BI__builtin_neon_vzip_v:
9027 case NEON::BI__builtin_neon_vzipq_v: {
9028 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
9029 Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
9030 Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
Craig Topper8a13c412014-05-21 05:09:00 +00009031 Value *SV = nullptr;
Tim Northovera2ee4332014-03-29 15:09:45 +00009032
9033 for (unsigned vi = 0; vi != 2; ++vi) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00009034 SmallVector<uint32_t, 16> Indices;
Tim Northovera2ee4332014-03-29 15:09:45 +00009035 for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
Craig Topper832caf02016-05-29 02:39:30 +00009036 Indices.push_back((i + vi*e) >> 1);
9037 Indices.push_back(((i + vi*e) >> 1)+e);
Tim Northovera2ee4332014-03-29 15:09:45 +00009038 }
David Blaikiefb901c7a2015-04-04 15:12:29 +00009039 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi);
Craig Topper832caf02016-05-29 02:39:30 +00009040 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], Indices, "vzip");
John McCall7f416cc2015-09-08 08:05:57 +00009041 SV = Builder.CreateDefaultAlignedStore(SV, Addr);
Tim Northovera2ee4332014-03-29 15:09:45 +00009042 }
9043 return SV;
9044 }
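  // For illustration, with 4-element vectors the three permute cases above
  // produce roughly the following pairs (a sketch; lane counts follow VTy):
  //   vtrn: {a0,b0,a2,b2}, {a1,b1,a3,b3}   -- transpose of 2x2 lane pairs
  //   vuzp: {a0,a2,b0,b2}, {a1,a3,b1,b3}   -- de-interleave even/odd lanes
  //   vzip: {a0,b0,a1,b1}, {a2,b2,a3,b3}   -- interleave the two inputs
  // The two permuted vectors are stored to consecutive vector-sized slots of
  // the destination pointer.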
9045 case NEON::BI__builtin_neon_vqtbl1q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009046 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl1, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009047 Ops, "vtbl1");
9048 }
9049 case NEON::BI__builtin_neon_vqtbl2q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009050 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl2, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009051 Ops, "vtbl2");
9052 }
9053 case NEON::BI__builtin_neon_vqtbl3q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009054 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl3, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009055 Ops, "vtbl3");
9056 }
9057 case NEON::BI__builtin_neon_vqtbl4q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009058 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbl4, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009059 Ops, "vtbl4");
9060 }
9061 case NEON::BI__builtin_neon_vqtbx1q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009062 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx1, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009063 Ops, "vtbx1");
9064 }
9065 case NEON::BI__builtin_neon_vqtbx2q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009066 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx2, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009067 Ops, "vtbx2");
9068 }
9069 case NEON::BI__builtin_neon_vqtbx3q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009070 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx3, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009071 Ops, "vtbx3");
9072 }
9073 case NEON::BI__builtin_neon_vqtbx4q_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009074 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_tbx4, Ty),
Tim Northovera2ee4332014-03-29 15:09:45 +00009075 Ops, "vtbx4");
9076 }
9077 case NEON::BI__builtin_neon_vsqadd_v:
9078 case NEON::BI__builtin_neon_vsqaddq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009079 Int = Intrinsic::aarch64_neon_usqadd;
Tim Northovera2ee4332014-03-29 15:09:45 +00009080 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vsqadd");
9081 }
9082 case NEON::BI__builtin_neon_vuqadd_v:
9083 case NEON::BI__builtin_neon_vuqaddq_v: {
Tim Northover573cbee2014-05-24 12:52:07 +00009084 Int = Intrinsic::aarch64_neon_suqadd;
Tim Northovera2ee4332014-03-29 15:09:45 +00009085 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vuqadd");
9086 }
Mandeep Singh Grang0054f482018-07-17 22:03:24 +00009087 case AArch64::BI_BitScanForward:
9088 case AArch64::BI_BitScanForward64:
9089 return EmitMSVCBuiltinExpr(MSVCIntrin::_BitScanForward, E);
9090 case AArch64::BI_BitScanReverse:
9091 case AArch64::BI_BitScanReverse64:
9092 return EmitMSVCBuiltinExpr(MSVCIntrin::_BitScanReverse, E);
9093 case AArch64::BI_InterlockedAnd64:
9094 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd, E);
9095 case AArch64::BI_InterlockedExchange64:
9096 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange, E);
9097 case AArch64::BI_InterlockedExchangeAdd64:
9098 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd, E);
9099 case AArch64::BI_InterlockedExchangeSub64:
9100 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeSub, E);
9101 case AArch64::BI_InterlockedOr64:
9102 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr, E);
9103 case AArch64::BI_InterlockedXor64:
9104 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor, E);
9105 case AArch64::BI_InterlockedDecrement64:
9106 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement, E);
9107 case AArch64::BI_InterlockedIncrement64:
9108 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement, E);
Eli Friedmanb262d162018-10-31 21:31:09 +00009109 case AArch64::BI_InterlockedExchangeAdd8_acq:
9110 case AArch64::BI_InterlockedExchangeAdd16_acq:
9111 case AArch64::BI_InterlockedExchangeAdd_acq:
9112 case AArch64::BI_InterlockedExchangeAdd64_acq:
9113 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd_acq, E);
9114 case AArch64::BI_InterlockedExchangeAdd8_rel:
9115 case AArch64::BI_InterlockedExchangeAdd16_rel:
9116 case AArch64::BI_InterlockedExchangeAdd_rel:
9117 case AArch64::BI_InterlockedExchangeAdd64_rel:
9118 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd_rel, E);
9119 case AArch64::BI_InterlockedExchangeAdd8_nf:
9120 case AArch64::BI_InterlockedExchangeAdd16_nf:
9121 case AArch64::BI_InterlockedExchangeAdd_nf:
9122 case AArch64::BI_InterlockedExchangeAdd64_nf:
9123 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd_nf, E);
Mandeep Singh Grang7fa07e52018-11-02 21:18:23 +00009124 case AArch64::BI_InterlockedExchange8_acq:
9125 case AArch64::BI_InterlockedExchange16_acq:
9126 case AArch64::BI_InterlockedExchange_acq:
9127 case AArch64::BI_InterlockedExchange64_acq:
9128 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange_acq, E);
9129 case AArch64::BI_InterlockedExchange8_rel:
9130 case AArch64::BI_InterlockedExchange16_rel:
9131 case AArch64::BI_InterlockedExchange_rel:
9132 case AArch64::BI_InterlockedExchange64_rel:
9133 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange_rel, E);
9134 case AArch64::BI_InterlockedExchange8_nf:
9135 case AArch64::BI_InterlockedExchange16_nf:
9136 case AArch64::BI_InterlockedExchange_nf:
9137 case AArch64::BI_InterlockedExchange64_nf:
9138 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange_nf, E);
Mandeep Singh Grang6b880682018-11-06 00:36:48 +00009139 case AArch64::BI_InterlockedCompareExchange8_acq:
9140 case AArch64::BI_InterlockedCompareExchange16_acq:
9141 case AArch64::BI_InterlockedCompareExchange_acq:
9142 case AArch64::BI_InterlockedCompareExchange64_acq:
9143 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedCompareExchange_acq, E);
9144 case AArch64::BI_InterlockedCompareExchange8_rel:
9145 case AArch64::BI_InterlockedCompareExchange16_rel:
9146 case AArch64::BI_InterlockedCompareExchange_rel:
9147 case AArch64::BI_InterlockedCompareExchange64_rel:
9148 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedCompareExchange_rel, E);
9149 case AArch64::BI_InterlockedCompareExchange8_nf:
9150 case AArch64::BI_InterlockedCompareExchange16_nf:
9151 case AArch64::BI_InterlockedCompareExchange_nf:
9152 case AArch64::BI_InterlockedCompareExchange64_nf:
9153 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedCompareExchange_nf, E);
Mandeep Singh Grangec62b312018-11-06 01:11:25 +00009154 case AArch64::BI_InterlockedOr8_acq:
9155 case AArch64::BI_InterlockedOr16_acq:
9156 case AArch64::BI_InterlockedOr_acq:
9157 case AArch64::BI_InterlockedOr64_acq:
9158 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr_acq, E);
9159 case AArch64::BI_InterlockedOr8_rel:
9160 case AArch64::BI_InterlockedOr16_rel:
9161 case AArch64::BI_InterlockedOr_rel:
9162 case AArch64::BI_InterlockedOr64_rel:
9163 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr_rel, E);
9164 case AArch64::BI_InterlockedOr8_nf:
9165 case AArch64::BI_InterlockedOr16_nf:
9166 case AArch64::BI_InterlockedOr_nf:
9167 case AArch64::BI_InterlockedOr64_nf:
9168 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr_nf, E);
Mandeep Singh Grang806f1072018-11-06 04:55:20 +00009169 case AArch64::BI_InterlockedXor8_acq:
9170 case AArch64::BI_InterlockedXor16_acq:
9171 case AArch64::BI_InterlockedXor_acq:
9172 case AArch64::BI_InterlockedXor64_acq:
9173 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor_acq, E);
9174 case AArch64::BI_InterlockedXor8_rel:
9175 case AArch64::BI_InterlockedXor16_rel:
9176 case AArch64::BI_InterlockedXor_rel:
9177 case AArch64::BI_InterlockedXor64_rel:
9178 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor_rel, E);
9179 case AArch64::BI_InterlockedXor8_nf:
9180 case AArch64::BI_InterlockedXor16_nf:
9181 case AArch64::BI_InterlockedXor_nf:
9182 case AArch64::BI_InterlockedXor64_nf:
9183 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor_nf, E);
Mandeep Singh Grangc89157b2018-11-06 05:03:13 +00009184 case AArch64::BI_InterlockedAnd8_acq:
9185 case AArch64::BI_InterlockedAnd16_acq:
9186 case AArch64::BI_InterlockedAnd_acq:
9187 case AArch64::BI_InterlockedAnd64_acq:
9188 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd_acq, E);
9189 case AArch64::BI_InterlockedAnd8_rel:
9190 case AArch64::BI_InterlockedAnd16_rel:
9191 case AArch64::BI_InterlockedAnd_rel:
9192 case AArch64::BI_InterlockedAnd64_rel:
9193 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd_rel, E);
9194 case AArch64::BI_InterlockedAnd8_nf:
9195 case AArch64::BI_InterlockedAnd16_nf:
9196 case AArch64::BI_InterlockedAnd_nf:
9197 case AArch64::BI_InterlockedAnd64_nf:
9198 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd_nf, E);
Mandeep Singh Grangfdf74d92018-11-06 05:05:32 +00009199 case AArch64::BI_InterlockedIncrement16_acq:
9200 case AArch64::BI_InterlockedIncrement_acq:
9201 case AArch64::BI_InterlockedIncrement64_acq:
9202 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement_acq, E);
9203 case AArch64::BI_InterlockedIncrement16_rel:
9204 case AArch64::BI_InterlockedIncrement_rel:
9205 case AArch64::BI_InterlockedIncrement64_rel:
9206 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement_rel, E);
9207 case AArch64::BI_InterlockedIncrement16_nf:
9208 case AArch64::BI_InterlockedIncrement_nf:
9209 case AArch64::BI_InterlockedIncrement64_nf:
9210 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement_nf, E);
Mandeep Singh Grang574cadd2018-11-06 05:07:43 +00009211 case AArch64::BI_InterlockedDecrement16_acq:
9212 case AArch64::BI_InterlockedDecrement_acq:
9213 case AArch64::BI_InterlockedDecrement64_acq:
9214 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement_acq, E);
9215 case AArch64::BI_InterlockedDecrement16_rel:
9216 case AArch64::BI_InterlockedDecrement_rel:
9217 case AArch64::BI_InterlockedDecrement64_rel:
9218 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement_rel, E);
9219 case AArch64::BI_InterlockedDecrement16_nf:
9220 case AArch64::BI_InterlockedDecrement_nf:
9221 case AArch64::BI_InterlockedDecrement64_nf:
9222 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement_nf, E);
Mandeep Singh Grangdf792962018-10-05 21:57:41 +00009223
9224 case AArch64::BI_InterlockedAdd: {
9225 Value *Arg0 = EmitScalarExpr(E->getArg(0));
9226 Value *Arg1 = EmitScalarExpr(E->getArg(1));
9227 AtomicRMWInst *RMWI = Builder.CreateAtomicRMW(
9228 AtomicRMWInst::Add, Arg0, Arg1,
9229 llvm::AtomicOrdering::SequentiallyConsistent);
9230 return Builder.CreateAdd(RMWI, Arg1);
9231 }
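  // Note that the atomicrmw above yields the value the memory held *before*
  // the addition, while _InterlockedAdd is expected to return the resulting
  // value, hence the extra CreateAdd of Arg1 on the way out.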
Tim Northovera2ee4332014-03-29 15:09:45 +00009232 }
9233}
9234
Bill Wendling65b2a962010-10-09 08:47:25 +00009235llvm::Value *CodeGenFunction::
Bill Wendlingf1a3fca2012-02-22 09:30:11 +00009236BuildVector(ArrayRef<llvm::Value*> Ops) {
Bill Wendling65b2a962010-10-09 08:47:25 +00009237 assert((Ops.size() & (Ops.size() - 1)) == 0 &&
9238 "Not a power-of-two sized vector!");
9239 bool AllConstants = true;
9240 for (unsigned i = 0, e = Ops.size(); i != e && AllConstants; ++i)
9241 AllConstants &= isa<Constant>(Ops[i]);
9242
9243 // If this is a constant vector, create a ConstantVector.
9244 if (AllConstants) {
Chris Lattner2d6b7b92012-01-25 05:34:41 +00009245 SmallVector<llvm::Constant*, 16> CstOps;
Bill Wendling65b2a962010-10-09 08:47:25 +00009246 for (unsigned i = 0, e = Ops.size(); i != e; ++i)
9247 CstOps.push_back(cast<Constant>(Ops[i]));
9248 return llvm::ConstantVector::get(CstOps);
9249 }
9250
9251 // Otherwise, insertelement the values to build the vector.
9252 Value *Result =
9253 llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), Ops.size()));
9254
9255 for (unsigned i = 0, e = Ops.size(); i != e; ++i)
Chris Lattner2d6b7b92012-01-25 05:34:41 +00009256 Result = Builder.CreateInsertElement(Result, Ops[i], Builder.getInt32(i));
Bill Wendling65b2a962010-10-09 08:47:25 +00009257
9258 return Result;
9259}
9260
Igor Bregeraadb8762016-06-08 13:59:20 +00009261// Convert the mask from an integer type to a vector of i1.
9262static Value *getMaskVecValue(CodeGenFunction &CGF, Value *Mask,
9263 unsigned NumElts) {
9264
9265 llvm::VectorType *MaskTy = llvm::VectorType::get(CGF.Builder.getInt1Ty(),
9266 cast<IntegerType>(Mask->getType())->getBitWidth());
9267 Value *MaskVec = CGF.Builder.CreateBitCast(Mask, MaskTy);
9268
9269 // If we have fewer than 8 elements, then the starting mask was an i8 and

9270 // we need to extract down to the right number of elements.
9271 if (NumElts < 8) {
Craig Topperd1cb4ce2016-06-12 00:41:24 +00009272 uint32_t Indices[4];
Igor Bregeraadb8762016-06-08 13:59:20 +00009273 for (unsigned i = 0; i != NumElts; ++i)
9274 Indices[i] = i;
9275 MaskVec = CGF.Builder.CreateShuffleVector(MaskVec, MaskVec,
9276 makeArrayRef(Indices, NumElts),
9277 "extract");
9278 }
9279 return MaskVec;
9280}
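
// As a rough example of the conversion above: an i8 mask used with a
// 4-element vector is bitcast to <8 x i1> and then shuffled down to its low
// four lanes, so lane i of the result holds bit i of the original mask,
// i.e. roughly
//   bool lane_i = (mask >> i) & 1;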
9281
Craig Topper6e891fb2016-05-31 01:50:10 +00009282static Value *EmitX86MaskedStore(CodeGenFunction &CGF,
Craig Topperf886b442018-06-03 19:02:57 +00009283 ArrayRef<Value *> Ops,
Craig Topper6e891fb2016-05-31 01:50:10 +00009284 unsigned Align) {
9285 // Cast the pointer to the right type.
Craig Topperf886b442018-06-03 19:02:57 +00009286 Value *Ptr = CGF.Builder.CreateBitCast(Ops[0],
Craig Topper6e891fb2016-05-31 01:50:10 +00009287 llvm::PointerType::getUnqual(Ops[1]->getType()));
9288
Igor Bregeraadb8762016-06-08 13:59:20 +00009289 Value *MaskVec = getMaskVecValue(CGF, Ops[2],
9290 Ops[1]->getType()->getVectorNumElements());
Craig Topper6e891fb2016-05-31 01:50:10 +00009291
Craig Topperf886b442018-06-03 19:02:57 +00009292 return CGF.Builder.CreateMaskedStore(Ops[1], Ptr, Align, MaskVec);
Craig Topper6e891fb2016-05-31 01:50:10 +00009293}
9294
Craig Topper4b060e32016-05-31 06:58:07 +00009295static Value *EmitX86MaskedLoad(CodeGenFunction &CGF,
Craig Topperf886b442018-06-03 19:02:57 +00009296 ArrayRef<Value *> Ops, unsigned Align) {
Craig Topper4b060e32016-05-31 06:58:07 +00009297 // Cast the pointer to the right type.
Craig Topperf886b442018-06-03 19:02:57 +00009298 Value *Ptr = CGF.Builder.CreateBitCast(Ops[0],
Craig Topper4b060e32016-05-31 06:58:07 +00009299 llvm::PointerType::getUnqual(Ops[1]->getType()));
9300
Igor Bregeraadb8762016-06-08 13:59:20 +00009301 Value *MaskVec = getMaskVecValue(CGF, Ops[2],
9302 Ops[1]->getType()->getVectorNumElements());
Craig Topper4b060e32016-05-31 06:58:07 +00009303
Craig Topperf886b442018-06-03 19:02:57 +00009304 return CGF.Builder.CreateMaskedLoad(Ptr, Align, MaskVec, Ops[1]);
Igor Bregeraadb8762016-06-08 13:59:20 +00009305}
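
// For illustration, the two masked memory helpers above lower to the masked
// load/store intrinsics, whose per-lane behaviour is roughly:
//   for (unsigned i = 0; i != NumElts; ++i)
//     if (mask & (1u << i))
//       ptr[i] = vec[i];   // store form; the load form reads ptr[i] instead
//                          // and falls back to the passthru lane otherwise.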
Craig Topper4b060e32016-05-31 06:58:07 +00009306
Craig Topper3cce6a72018-06-10 17:27:05 +00009307static Value *EmitX86ExpandLoad(CodeGenFunction &CGF,
9308 ArrayRef<Value *> Ops) {
9309 llvm::Type *ResultTy = Ops[1]->getType();
9310 llvm::Type *PtrTy = ResultTy->getVectorElementType();
9311
9312 // Cast the pointer to element type.
9313 Value *Ptr = CGF.Builder.CreateBitCast(Ops[0],
9314 llvm::PointerType::getUnqual(PtrTy));
9315
9316 Value *MaskVec = getMaskVecValue(CGF, Ops[2],
9317 ResultTy->getVectorNumElements());
9318
9319 llvm::Function *F = CGF.CGM.getIntrinsic(Intrinsic::masked_expandload,
9320 ResultTy);
9321 return CGF.Builder.CreateCall(F, { Ptr, MaskVec, Ops[1] });
9322}
9323
Craig Topper07b6d3d2019-01-28 07:03:10 +00009324static Value *EmitX86CompressExpand(CodeGenFunction &CGF,
9325 ArrayRef<Value *> Ops,
9326 bool IsCompress) {
9327 llvm::Type *ResultTy = Ops[1]->getType();
9328
9329 Value *MaskVec = getMaskVecValue(CGF, Ops[2],
9330 ResultTy->getVectorNumElements());
9331
9332 Intrinsic::ID IID = IsCompress ? Intrinsic::x86_avx512_mask_compress
9333 : Intrinsic::x86_avx512_mask_expand;
9334 llvm::Function *F = CGF.CGM.getIntrinsic(IID, ResultTy);
9335 return CGF.Builder.CreateCall(F, { Ops[0], Ops[1], MaskVec });
9336}
9337
Craig Topper3cce6a72018-06-10 17:27:05 +00009338static Value *EmitX86CompressStore(CodeGenFunction &CGF,
9339 ArrayRef<Value *> Ops) {
9340 llvm::Type *ResultTy = Ops[1]->getType();
9341 llvm::Type *PtrTy = ResultTy->getVectorElementType();
9342
9343 // Cast the pointer to element type.
9344 Value *Ptr = CGF.Builder.CreateBitCast(Ops[0],
9345 llvm::PointerType::getUnqual(PtrTy));
9346
9347 Value *MaskVec = getMaskVecValue(CGF, Ops[2],
9348 ResultTy->getVectorNumElements());
9349
9350 llvm::Function *F = CGF.CGM.getIntrinsic(Intrinsic::masked_compressstore,
9351 ResultTy);
9352 return CGF.Builder.CreateCall(F, { Ops[1], Ptr, MaskVec });
9353}
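
// A sketch of the expand/compress semantics used by the helpers above, in
// scalar form:
//   // expand load: consecutive memory elements fill the enabled lanes, in order.
//   for (unsigned i = 0, j = 0; i != NumElts; ++i)
//     result[i] = (mask & (1u << i)) ? ptr[j++] : passthru[i];
//   // compress store: enabled lanes are packed out to consecutive memory.
//   for (unsigned i = 0, j = 0; i != NumElts; ++i)
//     if (mask & (1u << i)) ptr[j++] = vec[i];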
9354
Craig Topper5028ace2017-12-16 08:26:22 +00009355static Value *EmitX86MaskLogic(CodeGenFunction &CGF, Instruction::BinaryOps Opc,
Craig Topperc330ca82018-08-27 06:20:22 +00009356 ArrayRef<Value *> Ops,
Craig Topper5028ace2017-12-16 08:26:22 +00009357 bool InvertLHS = false) {
Craig Topperc330ca82018-08-27 06:20:22 +00009358 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
Craig Topper5028ace2017-12-16 08:26:22 +00009359 Value *LHS = getMaskVecValue(CGF, Ops[0], NumElts);
9360 Value *RHS = getMaskVecValue(CGF, Ops[1], NumElts);
9361
9362 if (InvertLHS)
9363 LHS = CGF.Builder.CreateNot(LHS);
9364
9365 return CGF.Builder.CreateBitCast(CGF.Builder.CreateBinOp(Opc, LHS, RHS),
Craig Toppera65bf652018-08-28 22:32:14 +00009366 Ops[0]->getType());
Craig Topper5028ace2017-12-16 08:26:22 +00009367}
9368
Simon Pilgrim45973792018-12-20 19:01:13 +00009369static Value *EmitX86FunnelShift(CodeGenFunction &CGF, Value *Op0, Value *Op1,
9370 Value *Amt, bool IsRight) {
9371 llvm::Type *Ty = Op0->getType();
9372
9373 // Amount may be scalar immediate, in which case create a splat vector.
9374 // Funnel shift amounts are treated modulo the element width, and the types are
9375 // all powers of 2, so we only care about the lowest log2 bits anyway.
9376 if (Amt->getType() != Ty) {
9377 unsigned NumElts = Ty->getVectorNumElements();
9378 Amt = CGF.Builder.CreateIntCast(Amt, Ty->getScalarType(), false);
9379 Amt = CGF.Builder.CreateVectorSplat(NumElts, Amt);
9380 }
9381
9382 unsigned IID = IsRight ? Intrinsic::fshr : Intrinsic::fshl;
James Y Knight8799cae2019-02-03 21:53:49 +00009383 Function *F = CGF.CGM.getIntrinsic(IID, Ty);
Simon Pilgrim45973792018-12-20 19:01:13 +00009384 return CGF.Builder.CreateCall(F, {Op0, Op1, Amt});
9385}
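
// For reference, a funnel shift treats the pair a:b as one double-width value
// and shifts it, keeping one element-sized half; per lane, roughly
//   fshl(a, b, n) == (a << s) | (b >> (BW - s))   // s = n % BW, s != 0, else a
//   fshr(a, b, n) == (b >> s) | (a << (BW - s))   // s = n % BW, s != 0, else b
// where BW is the element bit width.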
9386
Simon Pilgrima7bcd722019-01-20 16:40:33 +00009387static Value *EmitX86vpcom(CodeGenFunction &CGF, ArrayRef<Value *> Ops,
9388 bool IsSigned) {
9389 Value *Op0 = Ops[0];
9390 Value *Op1 = Ops[1];
9391 llvm::Type *Ty = Op0->getType();
9392 uint64_t Imm = cast<llvm::ConstantInt>(Ops[2])->getZExtValue() & 0x7;
9393
9394 CmpInst::Predicate Pred;
9395 switch (Imm) {
9396 case 0x0:
9397 Pred = IsSigned ? ICmpInst::ICMP_SLT : ICmpInst::ICMP_ULT;
9398 break;
9399 case 0x1:
9400 Pred = IsSigned ? ICmpInst::ICMP_SLE : ICmpInst::ICMP_ULE;
9401 break;
9402 case 0x2:
9403 Pred = IsSigned ? ICmpInst::ICMP_SGT : ICmpInst::ICMP_UGT;
9404 break;
9405 case 0x3:
9406 Pred = IsSigned ? ICmpInst::ICMP_SGE : ICmpInst::ICMP_UGE;
9407 break;
9408 case 0x4:
9409 Pred = ICmpInst::ICMP_EQ;
9410 break;
9411 case 0x5:
9412 Pred = ICmpInst::ICMP_NE;
9413 break;
9414 case 0x6:
9415 return llvm::Constant::getNullValue(Ty); // FALSE
9416 case 0x7:
9417 return llvm::Constant::getAllOnesValue(Ty); // TRUE
9418 default:
9419 llvm_unreachable("Unexpected XOP vpcom/vpcomu predicate");
9420 }
9421
9422 Value *Cmp = CGF.Builder.CreateICmp(Pred, Op0, Op1);
9423 Value *Res = CGF.Builder.CreateSExt(Cmp, Ty);
9424 return Res;
9425}
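
// As an example of the mapping above: an immediate of 0x2 selects (signed or
// unsigned) greater-than, so each lane of the result is computed roughly as
//   res[i] = (a[i] > b[i]) ? ~0 : 0;
// while 0x6 and 0x7 fold to all-zeros and all-ones without emitting a compare.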
9426
Igor Bregeraadb8762016-06-08 13:59:20 +00009427static Value *EmitX86Select(CodeGenFunction &CGF,
Craig Topperc1442972016-06-09 05:15:00 +00009428 Value *Mask, Value *Op0, Value *Op1) {
Igor Bregeraadb8762016-06-08 13:59:20 +00009429
9430 // If the mask is all ones, just return the first argument.
Craig Topperc1442972016-06-09 05:15:00 +00009431 if (const auto *C = dyn_cast<Constant>(Mask))
Igor Bregeraadb8762016-06-08 13:59:20 +00009432 if (C->isAllOnesValue())
Craig Topperc1442972016-06-09 05:15:00 +00009433 return Op0;
Igor Bregeraadb8762016-06-08 13:59:20 +00009434
Craig Topperc1442972016-06-09 05:15:00 +00009435 Mask = getMaskVecValue(CGF, Mask, Op0->getType()->getVectorNumElements());
Igor Bregeraadb8762016-06-08 13:59:20 +00009436
Craig Topperc1442972016-06-09 05:15:00 +00009437 return CGF.Builder.CreateSelect(Mask, Op0, Op1);
Craig Topper4b060e32016-05-31 06:58:07 +00009438}
9439
Craig Topperf89f62a2018-07-06 22:46:52 +00009440static Value *EmitX86ScalarSelect(CodeGenFunction &CGF,
9441 Value *Mask, Value *Op0, Value *Op1) {
9442 // If the mask is all ones, just return the first argument.
9443 if (const auto *C = dyn_cast<Constant>(Mask))
9444 if (C->isAllOnesValue())
9445 return Op0;
9446
9447 llvm::VectorType *MaskTy =
9448 llvm::VectorType::get(CGF.Builder.getInt1Ty(),
9449 Mask->getType()->getIntegerBitWidth());
9450 Mask = CGF.Builder.CreateBitCast(Mask, MaskTy);
9451 Mask = CGF.Builder.CreateExtractElement(Mask, (uint64_t)0);
9452 return CGF.Builder.CreateSelect(Mask, Op0, Op1);
9453}
9454
Craig Toppera57d64e2018-02-10 23:34:27 +00009455static Value *EmitX86MaskedCompareResult(CodeGenFunction &CGF, Value *Cmp,
9456 unsigned NumElts, Value *MaskIn) {
9457 if (MaskIn) {
9458 const auto *C = dyn_cast<Constant>(MaskIn);
9459 if (!C || !C->isAllOnesValue())
9460 Cmp = CGF.Builder.CreateAnd(Cmp, getMaskVecValue(CGF, MaskIn, NumElts));
9461 }
9462
9463 if (NumElts < 8) {
9464 uint32_t Indices[8];
9465 for (unsigned i = 0; i != NumElts; ++i)
9466 Indices[i] = i;
9467 for (unsigned i = NumElts; i != 8; ++i)
9468 Indices[i] = i % NumElts + NumElts;
9469 Cmp = CGF.Builder.CreateShuffleVector(
9470 Cmp, llvm::Constant::getNullValue(Cmp->getType()), Indices);
9471 }
9472
9473 return CGF.Builder.CreateBitCast(Cmp,
9474 IntegerType::get(CGF.getLLVMContext(),
9475 std::max(NumElts, 8U)));
9476}
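
// For example, when only 4 elements are compared, the <4 x i1> result above is
// widened to <8 x i1> by pulling the extra lanes from the all-zero vector, so
// the returned i8 mask carries the four compare bits and zeros elsewhere
// (roughly: bit i = a[i] CMP b[i] for i < 4, remaining bits = 0).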
9477
Craig Topperd1691c72016-06-22 04:47:58 +00009478static Value *EmitX86MaskedCompare(CodeGenFunction &CGF, unsigned CC,
Craig Topperde91dff2018-01-08 22:37:56 +00009479 bool Signed, ArrayRef<Value *> Ops) {
9480 assert((Ops.size() == 2 || Ops.size() == 4) &&
9481 "Unexpected number of arguments");
Craig Toppera54c21e2016-06-15 14:06:34 +00009482 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
Craig Topperd1691c72016-06-22 04:47:58 +00009483 Value *Cmp;
Craig Toppera54c21e2016-06-15 14:06:34 +00009484
Craig Topperd1691c72016-06-22 04:47:58 +00009485 if (CC == 3) {
9486 Cmp = Constant::getNullValue(
9487 llvm::VectorType::get(CGF.Builder.getInt1Ty(), NumElts));
9488 } else if (CC == 7) {
9489 Cmp = Constant::getAllOnesValue(
9490 llvm::VectorType::get(CGF.Builder.getInt1Ty(), NumElts));
9491 } else {
9492 ICmpInst::Predicate Pred;
9493 switch (CC) {
9494 default: llvm_unreachable("Unknown condition code");
9495 case 0: Pred = ICmpInst::ICMP_EQ; break;
9496 case 1: Pred = Signed ? ICmpInst::ICMP_SLT : ICmpInst::ICMP_ULT; break;
9497 case 2: Pred = Signed ? ICmpInst::ICMP_SLE : ICmpInst::ICMP_ULE; break;
9498 case 4: Pred = ICmpInst::ICMP_NE; break;
9499 case 5: Pred = Signed ? ICmpInst::ICMP_SGE : ICmpInst::ICMP_UGE; break;
9500 case 6: Pred = Signed ? ICmpInst::ICMP_SGT : ICmpInst::ICMP_UGT; break;
9501 }
9502 Cmp = CGF.Builder.CreateICmp(Pred, Ops[0], Ops[1]);
9503 }
9504
Craig Toppera57d64e2018-02-10 23:34:27 +00009505 Value *MaskIn = nullptr;
9506 if (Ops.size() == 4)
9507 MaskIn = Ops[3];
Craig Toppera54c21e2016-06-15 14:06:34 +00009508
Craig Toppera57d64e2018-02-10 23:34:27 +00009509 return EmitX86MaskedCompareResult(CGF, Cmp, NumElts, MaskIn);
Craig Toppera54c21e2016-06-15 14:06:34 +00009510}
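
// The CC encoding handled above maps as follows (0x3 and 0x7 never emit an
// icmp at all):
//   0: EQ   1: LT   2: LE   3: false   4: NE   5: GE   6: GT   7: true
// with LT/LE/GE/GT using the signed or unsigned predicate according to
// 'Signed'.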
9511
Craig Topperde91dff2018-01-08 22:37:56 +00009512static Value *EmitX86ConvertToMask(CodeGenFunction &CGF, Value *In) {
9513 Value *Zero = Constant::getNullValue(In->getType());
9514 return EmitX86MaskedCompare(CGF, 1, true, { In, Zero });
9515}
9516
Craig Topperbd7884e2019-01-26 02:42:01 +00009517static Value *EmitX86ConvertIntToFp(CodeGenFunction &CGF,
9518 ArrayRef<Value *> Ops, bool IsSigned) {
9519 unsigned Rnd = cast<llvm::ConstantInt>(Ops[3])->getZExtValue();
9520 llvm::Type *Ty = Ops[1]->getType();
9521
9522 Value *Res;
9523 if (Rnd != 4) {
9524 Intrinsic::ID IID = IsSigned ? Intrinsic::x86_avx512_sitofp_round
9525 : Intrinsic::x86_avx512_uitofp_round;
9526 Function *F = CGF.CGM.getIntrinsic(IID, { Ty, Ops[0]->getType() });
9527 Res = CGF.Builder.CreateCall(F, { Ops[0], Ops[3] });
9528 } else {
9529 Res = IsSigned ? CGF.Builder.CreateSIToFP(Ops[0], Ty)
9530 : CGF.Builder.CreateUIToFP(Ops[0], Ty);
9531 }
9532
9533 return EmitX86Select(CGF, Ops[2], Res, Ops[1]);
9534}
9535
Uriel Korach3fba3c32017-09-13 09:02:02 +00009536static Value *EmitX86Abs(CodeGenFunction &CGF, ArrayRef<Value *> Ops) {
9537
9538 llvm::Type *Ty = Ops[0]->getType();
9539 Value *Zero = llvm::Constant::getNullValue(Ty);
9540 Value *Sub = CGF.Builder.CreateSub(Zero, Ops[0]);
9541 Value *Cmp = CGF.Builder.CreateICmp(ICmpInst::ICMP_SGT, Ops[0], Zero);
9542 Value *Res = CGF.Builder.CreateSelect(Cmp, Ops[0], Sub);
Craig Topperf2043b02018-05-23 04:51:54 +00009543 return Res;
Uriel Korach3fba3c32017-09-13 09:02:02 +00009544}
9545
Craig Topper531ce282016-10-24 04:04:24 +00009546static Value *EmitX86MinMax(CodeGenFunction &CGF, ICmpInst::Predicate Pred,
9547 ArrayRef<Value *> Ops) {
9548 Value *Cmp = CGF.Builder.CreateICmp(Pred, Ops[0], Ops[1]);
9549 Value *Res = CGF.Builder.CreateSelect(Cmp, Ops[0], Ops[1]);
9550
Craig Topperf2043b02018-05-23 04:51:54 +00009551 assert(Ops.size() == 2);
9552 return Res;
Craig Topper531ce282016-10-24 04:04:24 +00009553}
9554
Gabor Buella70d8d512018-05-30 15:27:49 +00009555// Lowers X86 FMA intrinsics to IR.
9556static Value *EmitX86FMAExpr(CodeGenFunction &CGF, ArrayRef<Value *> Ops,
Craig Topperb92c77d2018-06-07 02:46:02 +00009557 unsigned BuiltinID, bool IsAddSub) {
Gabor Buella70d8d512018-05-30 15:27:49 +00009558
Craig Topperb92c77d2018-06-07 02:46:02 +00009559 bool Subtract = false;
9560 Intrinsic::ID IID = Intrinsic::not_intrinsic;
Gabor Buella70d8d512018-05-30 15:27:49 +00009561 switch (BuiltinID) {
9562 default: break;
Craig Topperb92c77d2018-06-07 02:46:02 +00009563 case clang::X86::BI__builtin_ia32_vfmsubps512_mask3:
9564 Subtract = true;
9565 LLVM_FALLTHROUGH;
9566 case clang::X86::BI__builtin_ia32_vfmaddps512_mask:
9567 case clang::X86::BI__builtin_ia32_vfmaddps512_maskz:
9568 case clang::X86::BI__builtin_ia32_vfmaddps512_mask3:
9569 IID = llvm::Intrinsic::x86_avx512_vfmadd_ps_512; break;
9570 case clang::X86::BI__builtin_ia32_vfmsubpd512_mask3:
9571 Subtract = true;
9572 LLVM_FALLTHROUGH;
9573 case clang::X86::BI__builtin_ia32_vfmaddpd512_mask:
9574 case clang::X86::BI__builtin_ia32_vfmaddpd512_maskz:
9575 case clang::X86::BI__builtin_ia32_vfmaddpd512_mask3:
9576 IID = llvm::Intrinsic::x86_avx512_vfmadd_pd_512; break;
9577 case clang::X86::BI__builtin_ia32_vfmsubaddps512_mask3:
9578 Subtract = true;
9579 LLVM_FALLTHROUGH;
9580 case clang::X86::BI__builtin_ia32_vfmaddsubps512_mask:
9581 case clang::X86::BI__builtin_ia32_vfmaddsubps512_maskz:
9582 case clang::X86::BI__builtin_ia32_vfmaddsubps512_mask3:
9583 IID = llvm::Intrinsic::x86_avx512_vfmaddsub_ps_512;
Gabor Buella70d8d512018-05-30 15:27:49 +00009584 break;
Craig Topperb92c77d2018-06-07 02:46:02 +00009585 case clang::X86::BI__builtin_ia32_vfmsubaddpd512_mask3:
9586 Subtract = true;
9587 LLVM_FALLTHROUGH;
9588 case clang::X86::BI__builtin_ia32_vfmaddsubpd512_mask:
9589 case clang::X86::BI__builtin_ia32_vfmaddsubpd512_maskz:
9590 case clang::X86::BI__builtin_ia32_vfmaddsubpd512_mask3:
9591 IID = llvm::Intrinsic::x86_avx512_vfmaddsub_pd_512;
Gabor Buella70d8d512018-05-30 15:27:49 +00009592 break;
9593 }
Gabor Buella70d8d512018-05-30 15:27:49 +00009594
9595 Value *A = Ops[0];
9596 Value *B = Ops[1];
9597 Value *C = Ops[2];
9598
Craig Topperb92c77d2018-06-07 02:46:02 +00009599 if (Subtract)
9600 C = CGF.Builder.CreateFNeg(C);
Gabor Buella70d8d512018-05-30 15:27:49 +00009601
Craig Topperb92c77d2018-06-07 02:46:02 +00009602 Value *Res;
Gabor Buella70d8d512018-05-30 15:27:49 +00009603
Craig Topperb92c77d2018-06-07 02:46:02 +00009604 // The generic FMA intrinsic only handles _MM_FROUND_CUR_DIRECTION/4 (no rounding).
9605 if (IID != Intrinsic::not_intrinsic &&
9606 cast<llvm::ConstantInt>(Ops.back())->getZExtValue() != (uint64_t)4) {
9607 Function *Intr = CGF.CGM.getIntrinsic(IID);
9608 Res = CGF.Builder.CreateCall(Intr, {A, B, C, Ops.back() });
9609 } else {
9610 llvm::Type *Ty = A->getType();
9611 Function *FMA = CGF.CGM.getIntrinsic(Intrinsic::fma, Ty);
9612 Res = CGF.Builder.CreateCall(FMA, {A, B, C} );
Gabor Buella70d8d512018-05-30 15:27:49 +00009613
Craig Topperb92c77d2018-06-07 02:46:02 +00009614 if (IsAddSub) {
9615 // Negate even elts in C using a mask.
9616 unsigned NumElts = Ty->getVectorNumElements();
Craig Topper284c5f32018-07-05 20:38:31 +00009617 SmallVector<uint32_t, 16> Indices(NumElts);
9618 for (unsigned i = 0; i != NumElts; ++i)
9619 Indices[i] = i + (i % 2) * NumElts;
Craig Topperb92c77d2018-06-07 02:46:02 +00009620
9621 Value *NegC = CGF.Builder.CreateFNeg(C);
9622 Value *FMSub = CGF.Builder.CreateCall(FMA, {A, B, NegC} );
Craig Topper284c5f32018-07-05 20:38:31 +00009623 Res = CGF.Builder.CreateShuffleVector(FMSub, Res, Indices);
Gabor Buella70d8d512018-05-30 15:27:49 +00009624 }
Gabor Buella70d8d512018-05-30 15:27:49 +00009625 }
9626
Craig Topperb92c77d2018-06-07 02:46:02 +00009627 // Handle any required masking.
9628 Value *MaskFalseVal = nullptr;
9629 switch (BuiltinID) {
9630 case clang::X86::BI__builtin_ia32_vfmaddps512_mask:
9631 case clang::X86::BI__builtin_ia32_vfmaddpd512_mask:
9632 case clang::X86::BI__builtin_ia32_vfmaddsubps512_mask:
9633 case clang::X86::BI__builtin_ia32_vfmaddsubpd512_mask:
9634 MaskFalseVal = Ops[0];
9635 break;
9636 case clang::X86::BI__builtin_ia32_vfmaddps512_maskz:
9637 case clang::X86::BI__builtin_ia32_vfmaddpd512_maskz:
9638 case clang::X86::BI__builtin_ia32_vfmaddsubps512_maskz:
9639 case clang::X86::BI__builtin_ia32_vfmaddsubpd512_maskz:
9640 MaskFalseVal = Constant::getNullValue(Ops[0]->getType());
9641 break;
9642 case clang::X86::BI__builtin_ia32_vfmsubps512_mask3:
9643 case clang::X86::BI__builtin_ia32_vfmaddps512_mask3:
9644 case clang::X86::BI__builtin_ia32_vfmsubpd512_mask3:
9645 case clang::X86::BI__builtin_ia32_vfmaddpd512_mask3:
9646 case clang::X86::BI__builtin_ia32_vfmsubaddps512_mask3:
9647 case clang::X86::BI__builtin_ia32_vfmaddsubps512_mask3:
9648 case clang::X86::BI__builtin_ia32_vfmsubaddpd512_mask3:
9649 case clang::X86::BI__builtin_ia32_vfmaddsubpd512_mask3:
9650 MaskFalseVal = Ops[2];
9651 break;
9652 }
9653
9654 if (MaskFalseVal)
9655 return EmitX86Select(CGF, Ops[3], Res, MaskFalseVal);
9656
Gabor Buella70d8d512018-05-30 15:27:49 +00009657 return Res;
9658}
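
// In the IsAddSub case above, the shuffle interleaves the two calls so that,
// for example with 4 lanes, the result is roughly
//   { fma(a0,b0,-c0), fma(a1,b1,c1), fma(a2,b2,-c2), fma(a3,b3,c3) }
// i.e. even lanes subtract C and odd lanes add it.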
9659
Craig Topper8a8d7272018-07-08 01:10:47 +00009660static Value *
9661EmitScalarFMAExpr(CodeGenFunction &CGF, MutableArrayRef<Value *> Ops,
9662 Value *Upper, bool ZeroMask = false, unsigned PTIdx = 0,
9663 bool NegAcc = false) {
9664 unsigned Rnd = 4;
9665 if (Ops.size() > 4)
9666 Rnd = cast<llvm::ConstantInt>(Ops[4])->getZExtValue();
9667
9668 if (NegAcc)
9669 Ops[2] = CGF.Builder.CreateFNeg(Ops[2]);
9670
9671 Ops[0] = CGF.Builder.CreateExtractElement(Ops[0], (uint64_t)0);
9672 Ops[1] = CGF.Builder.CreateExtractElement(Ops[1], (uint64_t)0);
9673 Ops[2] = CGF.Builder.CreateExtractElement(Ops[2], (uint64_t)0);
9674 Value *Res;
9675 if (Rnd != 4) {
9676 Intrinsic::ID IID = Ops[0]->getType()->getPrimitiveSizeInBits() == 32 ?
9677 Intrinsic::x86_avx512_vfmadd_f32 :
9678 Intrinsic::x86_avx512_vfmadd_f64;
9679 Res = CGF.Builder.CreateCall(CGF.CGM.getIntrinsic(IID),
9680 {Ops[0], Ops[1], Ops[2], Ops[4]});
9681 } else {
9682 Function *FMA = CGF.CGM.getIntrinsic(Intrinsic::fma, Ops[0]->getType());
9683 Res = CGF.Builder.CreateCall(FMA, Ops.slice(0, 3));
9684 }
9685 // If we have more than 3 arguments, we need to do masking.
9686 if (Ops.size() > 3) {
9687 Value *PassThru = ZeroMask ? Constant::getNullValue(Res->getType())
9688 : Ops[PTIdx];
9689
9690 // If we negated the accumulator and it is the PassThru value, we need to
9691 // bypass the negate. Conveniently Upper should be the same thing in this
9692 // case.
9693 if (NegAcc && PTIdx == 2)
9694 PassThru = CGF.Builder.CreateExtractElement(Upper, (uint64_t)0);
9695
9696 Res = EmitX86ScalarSelect(CGF, Ops[3], Res, PassThru);
9697 }
9698 return CGF.Builder.CreateInsertElement(Upper, Res, (uint64_t)0);
9699}
9700
Craig Topper304edc12018-04-09 19:17:54 +00009701static Value *EmitX86Muldq(CodeGenFunction &CGF, bool IsSigned,
9702 ArrayRef<Value *> Ops) {
9703 llvm::Type *Ty = Ops[0]->getType();
9704 // Arguments have a vXi32 type so cast to vXi64.
9705 Ty = llvm::VectorType::get(CGF.Int64Ty,
9706 Ty->getPrimitiveSizeInBits() / 64);
9707 Value *LHS = CGF.Builder.CreateBitCast(Ops[0], Ty);
9708 Value *RHS = CGF.Builder.CreateBitCast(Ops[1], Ty);
9709
9710 if (IsSigned) {
9711 // Shift left then arithmetic shift right.
9712 Constant *ShiftAmt = ConstantInt::get(Ty, 32);
9713 LHS = CGF.Builder.CreateShl(LHS, ShiftAmt);
9714 LHS = CGF.Builder.CreateAShr(LHS, ShiftAmt);
9715 RHS = CGF.Builder.CreateShl(RHS, ShiftAmt);
9716 RHS = CGF.Builder.CreateAShr(RHS, ShiftAmt);
9717 } else {
9718 // Clear the upper bits.
9719 Constant *Mask = ConstantInt::get(Ty, 0xffffffff);
9720 LHS = CGF.Builder.CreateAnd(LHS, Mask);
9721 RHS = CGF.Builder.CreateAnd(RHS, Mask);
9722 }
9723
9724 return CGF.Builder.CreateMul(LHS, RHS);
9725}
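
// For illustration, each 64-bit lane produced above is roughly
//   res = (int64_t)(int32_t)(a & 0xffffffff) * (int64_t)(int32_t)(b & 0xffffffff)
// in the signed case (the shl/ashr pair is the sign extension), and
//   res = (a & 0xffffffffULL) * (b & 0xffffffffULL)
// in the unsigned case, where a and b are the lanes reinterpreted as 64-bit
// integers.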
9726
Craig Topper288bd2e2018-05-21 20:58:23 +00009727// Emit a masked pternlog intrinsic. This only exists because the header has to
9728// use a macro and we aren't able to pass the input argument to a pternlog
9729// builtin and a select builtin without evaluating it twice.
9730static Value *EmitX86Ternlog(CodeGenFunction &CGF, bool ZeroMask,
9731 ArrayRef<Value *> Ops) {
9732 llvm::Type *Ty = Ops[0]->getType();
9733
9734 unsigned VecWidth = Ty->getPrimitiveSizeInBits();
9735 unsigned EltWidth = Ty->getScalarSizeInBits();
9736 Intrinsic::ID IID;
9737 if (VecWidth == 128 && EltWidth == 32)
9738 IID = Intrinsic::x86_avx512_pternlog_d_128;
9739 else if (VecWidth == 256 && EltWidth == 32)
9740 IID = Intrinsic::x86_avx512_pternlog_d_256;
9741 else if (VecWidth == 512 && EltWidth == 32)
9742 IID = Intrinsic::x86_avx512_pternlog_d_512;
9743 else if (VecWidth == 128 && EltWidth == 64)
9744 IID = Intrinsic::x86_avx512_pternlog_q_128;
9745 else if (VecWidth == 256 && EltWidth == 64)
9746 IID = Intrinsic::x86_avx512_pternlog_q_256;
9747 else if (VecWidth == 512 && EltWidth == 64)
9748 IID = Intrinsic::x86_avx512_pternlog_q_512;
9749 else
9750 llvm_unreachable("Unexpected intrinsic");
9751
9752 Value *Ternlog = CGF.Builder.CreateCall(CGF.CGM.getIntrinsic(IID),
9753 Ops.drop_back());
9754 Value *PassThru = ZeroMask ? ConstantAggregateZero::get(Ty) : Ops[0];
9755 return EmitX86Select(CGF, Ops[4], Ternlog, PassThru);
9756}
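
// A sketch of the ternary-logic semantics being wrapped above: for every bit
// position, the three source bits form a 3-bit index into the 8-bit immediate,
// roughly
//   idx    = (a_bit << 2) | (b_bit << 1) | c_bit;
//   result = (imm >> idx) & 1;
// The helper only adds the masking/select that the header macro cannot express
// without evaluating an argument twice.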
9757
Fangrui Song6907ce22018-07-30 19:24:48 +00009758static Value *EmitX86SExtMask(CodeGenFunction &CGF, Value *Op,
Michael Zuckerman755a13d2017-04-04 13:29:53 +00009759 llvm::Type *DstTy) {
9760 unsigned NumberOfElements = DstTy->getVectorNumElements();
9761 Value *Mask = getMaskVecValue(CGF, Op, NumberOfElements);
9762 return CGF.Builder.CreateSExt(Mask, DstTy, "vpmovm2");
9763}
9764
Simon Pilgrim313dc852018-12-20 11:53:45 +00009765// Emit addition or subtraction with signed/unsigned saturation.
Simon Pilgrima7b30b42018-12-19 14:43:47 +00009766static Value *EmitX86AddSubSatExpr(CodeGenFunction &CGF,
Simon Pilgrim313dc852018-12-20 11:53:45 +00009767 ArrayRef<Value *> Ops, bool IsSigned,
Tomasz Krupae8cf9722018-08-14 08:01:38 +00009768 bool IsAddition) {
Simon Pilgrim313dc852018-12-20 11:53:45 +00009769 Intrinsic::ID IID =
9770 IsSigned ? (IsAddition ? Intrinsic::sadd_sat : Intrinsic::ssub_sat)
9771 : (IsAddition ? Intrinsic::uadd_sat : Intrinsic::usub_sat);
Simon Pilgrima7b30b42018-12-19 14:43:47 +00009772 llvm::Function *F = CGF.CGM.getIntrinsic(IID, Ops[0]->getType());
9773 return CGF.Builder.CreateCall(F, {Ops[0], Ops[1]});
Tomasz Krupae8cf9722018-08-14 08:01:38 +00009774}
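
// The saturating intrinsics selected above clamp instead of wrapping, e.g. for
// unsigned bytes roughly
//   uadd_sat(a, b) == (a + b > 255) ? 255 : a + b;
// and for signed elements the result is clamped to the [min, max] range of the
// element type.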
9775
Erich Keane9937b132017-09-01 19:42:45 +00009776Value *CodeGenFunction::EmitX86CpuIs(const CallExpr *E) {
Craig Topper699ae0c2017-08-10 20:28:30 +00009777 const Expr *CPUExpr = E->getArg(0)->IgnoreParenCasts();
9778 StringRef CPUStr = cast<clang::StringLiteral>(CPUExpr)->getString();
Erich Keane9937b132017-09-01 19:42:45 +00009779 return EmitX86CpuIs(CPUStr);
9780}
9781
9782Value *CodeGenFunction::EmitX86CpuIs(StringRef CPUStr) {
Craig Topper699ae0c2017-08-10 20:28:30 +00009783
Erich Keane9937b132017-09-01 19:42:45 +00009784 llvm::Type *Int32Ty = Builder.getInt32Ty();
Craig Topper699ae0c2017-08-10 20:28:30 +00009785
9786 // Matching the struct layout from the compiler-rt/libgcc structure that is
9787 // filled in:
9788 // unsigned int __cpu_vendor;
9789 // unsigned int __cpu_type;
9790 // unsigned int __cpu_subtype;
9791 // unsigned int __cpu_features[1];
9792 llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty, Int32Ty,
9793 llvm::ArrayType::get(Int32Ty, 1));
9794
9795 // Grab the global __cpu_model.
Erich Keane9937b132017-09-01 19:42:45 +00009796 llvm::Constant *CpuModel = CGM.CreateRuntimeVariable(STy, "__cpu_model");
Haibo Huang303b2332018-12-20 21:33:59 +00009797 cast<llvm::GlobalValue>(CpuModel)->setDSOLocal(true);
Craig Topper699ae0c2017-08-10 20:28:30 +00009798
9799 // Calculate the index needed to access the correct field based on the
9800 // range. Also adjust the expected value.
9801 unsigned Index;
9802 unsigned Value;
Erich Keane82025212017-11-15 00:11:24 +00009803 std::tie(Index, Value) = StringSwitch<std::pair<unsigned, unsigned>>(CPUStr)
9804#define X86_VENDOR(ENUM, STRING) \
9805 .Case(STRING, {0u, static_cast<unsigned>(llvm::X86::ENUM)})
9806#define X86_CPU_TYPE_COMPAT_WITH_ALIAS(ARCHNAME, ENUM, STR, ALIAS) \
9807 .Cases(STR, ALIAS, {1u, static_cast<unsigned>(llvm::X86::ENUM)})
9808#define X86_CPU_TYPE_COMPAT(ARCHNAME, ENUM, STR) \
9809 .Case(STR, {1u, static_cast<unsigned>(llvm::X86::ENUM)})
9810#define X86_CPU_SUBTYPE_COMPAT(ARCHNAME, ENUM, STR) \
9811 .Case(STR, {2u, static_cast<unsigned>(llvm::X86::ENUM)})
9812#include "llvm/Support/X86TargetParser.def"
9813 .Default({0, 0});
9814 assert(Value != 0 && "Invalid CPUStr passed to CpuIs");
Craig Topper699ae0c2017-08-10 20:28:30 +00009815
9816 // Grab the appropriate field from __cpu_model.
Erich Keane82025212017-11-15 00:11:24 +00009817 llvm::Value *Idxs[] = {ConstantInt::get(Int32Ty, 0),
9818 ConstantInt::get(Int32Ty, Index)};
Erich Keane9937b132017-09-01 19:42:45 +00009819 llvm::Value *CpuValue = Builder.CreateGEP(STy, CpuModel, Idxs);
9820 CpuValue = Builder.CreateAlignedLoad(CpuValue, CharUnits::fromQuantity(4));
Craig Topper699ae0c2017-08-10 20:28:30 +00009821
9822 // Check the value of the field against the requested value.
Erich Keane9937b132017-09-01 19:42:45 +00009823 return Builder.CreateICmpEQ(CpuValue,
Craig Topper699ae0c2017-08-10 20:28:30 +00009824 llvm::ConstantInt::get(Int32Ty, Value));
9825}
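
// Taken together, the code above lowers __builtin_cpu_is("NAME") to roughly
//   __cpu_model.<field> == <constant>
// where <field> is __cpu_vendor, __cpu_type or __cpu_subtype depending on
// which table in X86TargetParser.def the name came from.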
9826
Erich Keane9937b132017-09-01 19:42:45 +00009827Value *CodeGenFunction::EmitX86CpuSupports(const CallExpr *E) {
9828 const Expr *FeatureExpr = E->getArg(0)->IgnoreParenCasts();
9829 StringRef FeatureStr = cast<StringLiteral>(FeatureExpr)->getString();
9830 return EmitX86CpuSupports(FeatureStr);
9831}
9832
Craig Topper4d8ced12018-10-20 03:51:52 +00009833uint64_t
Erich Keane3efe0022018-07-20 14:13:28 +00009834CodeGenFunction::GetX86CpuSupportsMask(ArrayRef<StringRef> FeatureStrs) {
Erich Keane9937b132017-09-01 19:42:45 +00009835 // Processor features and mapping to processor feature value.
Craig Topper4d8ced12018-10-20 03:51:52 +00009836 uint64_t FeaturesMask = 0;
Erich Keane9937b132017-09-01 19:42:45 +00009837 for (const StringRef &FeatureStr : FeatureStrs) {
Erich Keane0a340ab2017-11-22 00:54:01 +00009838 unsigned Feature =
9839 StringSwitch<unsigned>(FeatureStr)
9840#define X86_FEATURE_COMPAT(VAL, ENUM, STR) .Case(STR, VAL)
9841#include "llvm/Support/X86TargetParser.def"
9842 ;
Craig Topper4d8ced12018-10-20 03:51:52 +00009843 FeaturesMask |= (1ULL << Feature);
Erich Keane9937b132017-09-01 19:42:45 +00009844 }
Erich Keane3efe0022018-07-20 14:13:28 +00009845 return FeaturesMask;
9846}
Erich Keane9937b132017-09-01 19:42:45 +00009847
Erich Keane3efe0022018-07-20 14:13:28 +00009848Value *CodeGenFunction::EmitX86CpuSupports(ArrayRef<StringRef> FeatureStrs) {
9849 return EmitX86CpuSupports(GetX86CpuSupportsMask(FeatureStrs));
9850}
9851
Craig Topper4d8ced12018-10-20 03:51:52 +00009852llvm::Value *CodeGenFunction::EmitX86CpuSupports(uint64_t FeaturesMask) {
9853 uint32_t Features1 = Lo_32(FeaturesMask);
9854 uint32_t Features2 = Hi_32(FeaturesMask);
Erich Keane9937b132017-09-01 19:42:45 +00009855
Craig Topper4d8ced12018-10-20 03:51:52 +00009856 Value *Result = Builder.getTrue();
Erich Keane9937b132017-09-01 19:42:45 +00009857
Craig Topper4d8ced12018-10-20 03:51:52 +00009858 if (Features1 != 0) {
9859 // Matching the struct layout from the compiler-rt/libgcc structure that is
9860 // filled in:
9861 // unsigned int __cpu_vendor;
9862 // unsigned int __cpu_type;
9863 // unsigned int __cpu_subtype;
9864 // unsigned int __cpu_features[1];
9865 llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty, Int32Ty,
9866 llvm::ArrayType::get(Int32Ty, 1));
Erich Keane9937b132017-09-01 19:42:45 +00009867
Craig Topper4d8ced12018-10-20 03:51:52 +00009868 // Grab the global __cpu_model.
9869 llvm::Constant *CpuModel = CGM.CreateRuntimeVariable(STy, "__cpu_model");
Haibo Huang303b2332018-12-20 21:33:59 +00009870 cast<llvm::GlobalValue>(CpuModel)->setDSOLocal(true);
Craig Topper4d8ced12018-10-20 03:51:52 +00009871
9872 // Grab the first (0th) element from the field __cpu_features off of the
9873 // global in the struct STy.
9874 Value *Idxs[] = {Builder.getInt32(0), Builder.getInt32(3),
9875 Builder.getInt32(0)};
9876 Value *CpuFeatures = Builder.CreateGEP(STy, CpuModel, Idxs);
9877 Value *Features =
9878 Builder.CreateAlignedLoad(CpuFeatures, CharUnits::fromQuantity(4));
9879
9880 // Check the value of the bit corresponding to the feature requested.
9881 Value *Mask = Builder.getInt32(Features1);
9882 Value *Bitset = Builder.CreateAnd(Features, Mask);
9883 Value *Cmp = Builder.CreateICmpEQ(Bitset, Mask);
9884 Result = Builder.CreateAnd(Result, Cmp);
9885 }
9886
9887 if (Features2 != 0) {
9888 llvm::Constant *CpuFeatures2 = CGM.CreateRuntimeVariable(Int32Ty,
9889 "__cpu_features2");
Haibo Huang303b2332018-12-20 21:33:59 +00009890 cast<llvm::GlobalValue>(CpuFeatures2)->setDSOLocal(true);
9891
Craig Topper4d8ced12018-10-20 03:51:52 +00009892 Value *Features =
9893 Builder.CreateAlignedLoad(CpuFeatures2, CharUnits::fromQuantity(4));
9894
9895 // Check the value of the bit corresponding to the feature requested.
9896 Value *Mask = Builder.getInt32(Features2);
9897 Value *Bitset = Builder.CreateAnd(Features, Mask);
9898 Value *Cmp = Builder.CreateICmpEQ(Bitset, Mask);
9899 Result = Builder.CreateAnd(Result, Cmp);
9900 }
9901
9902 return Result;
Erich Keane9937b132017-09-01 19:42:45 +00009903}
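
// The net effect of the checks above: __builtin_cpu_supports lowers to roughly
//   (__cpu_model.__cpu_features[0] & M1) == M1 && (__cpu_features2 & M2) == M2
// where M1/M2 are the low/high 32 bits of the requested feature mask and
// either test is dropped when its half of the mask is zero.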
9904
Erich Keane1fe643a2017-10-06 16:40:45 +00009905Value *CodeGenFunction::EmitX86CpuInit() {
9906 llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy,
9907 /*Variadic*/ false);
James Y Knight9871db02019-02-05 16:42:33 +00009908 llvm::FunctionCallee Func =
9909 CGM.CreateRuntimeFunction(FTy, "__cpu_indicator_init");
9910 cast<llvm::GlobalValue>(Func.getCallee())->setDSOLocal(true);
9911 cast<llvm::GlobalValue>(Func.getCallee())
9912 ->setDLLStorageClass(llvm::GlobalValue::DefaultStorageClass);
Erich Keane1fe643a2017-10-06 16:40:45 +00009913 return Builder.CreateCall(Func);
9914}
9915
Mike Stump11289f42009-09-09 15:08:12 +00009916Value *CodeGenFunction::EmitX86BuiltinExpr(unsigned BuiltinID,
Chris Lattner13653d72007-12-13 07:34:23 +00009917 const CallExpr *E) {
Erich Keane9937b132017-09-01 19:42:45 +00009918 if (BuiltinID == X86::BI__builtin_cpu_is)
9919 return EmitX86CpuIs(E);
9920 if (BuiltinID == X86::BI__builtin_cpu_supports)
9921 return EmitX86CpuSupports(E);
Erich Keane1fe643a2017-10-06 16:40:45 +00009922 if (BuiltinID == X86::BI__builtin_cpu_init)
9923 return EmitX86CpuInit();
Erich Keane9937b132017-09-01 19:42:45 +00009924
Chris Lattner0e62c1c2011-07-23 10:55:15 +00009925 SmallVector<Value*, 4> Ops;
Anders Carlsson4d3094a2007-12-14 17:48:24 +00009926
Chris Lattner64d7f2a2010-10-02 00:09:12 +00009927 // Find out if any arguments are required to be integer constant expressions.
9928 unsigned ICEArguments = 0;
9929 ASTContext::GetBuiltinTypeError Error;
9930 getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
9931 assert(Error == ASTContext::GE_None && "Should not codegen an error");
9932
9933 for (unsigned i = 0, e = E->getNumArgs(); i != e; i++) {
9934 // If this is a normal argument, just emit it as a scalar.
9935 if ((ICEArguments & (1 << i)) == 0) {
9936 Ops.push_back(EmitScalarExpr(E->getArg(i)));
9937 continue;
9938 }
9939
9940 // If this is required to be a constant, constant fold it so that we know
9941 // that the generated intrinsic gets a ConstantInt.
9942 llvm::APSInt Result;
9943 bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
9944 assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
John McCallad7c5c12011-02-08 08:22:06 +00009945 Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
Chris Lattner64d7f2a2010-10-02 00:09:12 +00009946 }
Anders Carlsson4d3094a2007-12-14 17:48:24 +00009947
Sanjay Patel280cfd12016-06-15 21:20:04 +00009948 // These exist so that the builtin that takes an immediate can be bounds
9949 // checked by clang to avoid passing bad immediates to the backend. Since
9950 // AVX has a larger immediate than SSE we would need separate builtins to
9951 // do the different bounds checking. Rather than create a clang-specific
9952 // SSE-only builtin, this implements eight separate builtins to match the
9953 // gcc implementation.
9954 auto getCmpIntrinsicCall = [this, &Ops](Intrinsic::ID ID, unsigned Imm) {
9955 Ops.push_back(llvm::ConstantInt::get(Int8Ty, Imm));
9956 llvm::Function *F = CGM.getIntrinsic(ID);
9957 return Builder.CreateCall(F, Ops);
9958 };
9959
9960 // For the vector forms of FP comparisons, translate the builtins directly to
9961 // IR.
9962 // TODO: The builtins could be removed if the SSE header files used vector
9963 // extension comparisons directly (vector ordered/unordered may need
9964 // additional support via __builtin_isnan()).
Craig Topper01600632016-07-08 01:57:24 +00009965 auto getVectorFCmpIR = [this, &Ops](CmpInst::Predicate Pred) {
Sanjay Patel280cfd12016-06-15 21:20:04 +00009966 Value *Cmp = Builder.CreateFCmp(Pred, Ops[0], Ops[1]);
Craig Topper01600632016-07-08 01:57:24 +00009967 llvm::VectorType *FPVecTy = cast<llvm::VectorType>(Ops[0]->getType());
Sanjay Patel280cfd12016-06-15 21:20:04 +00009968 llvm::VectorType *IntVecTy = llvm::VectorType::getInteger(FPVecTy);
9969 Value *Sext = Builder.CreateSExt(Cmp, IntVecTy);
9970 return Builder.CreateBitCast(Sext, FPVecTy);
9971 };
9972
Anders Carlsson895af082007-12-09 23:17:02 +00009973 switch (BuiltinID) {
Craig Topper8a13c412014-05-21 05:09:00 +00009974 default: return nullptr;
Warren Hunt20e4a5d2014-02-21 23:08:53 +00009975 case X86::BI_mm_prefetch: {
John McCall7f416cc2015-09-08 08:05:57 +00009976 Value *Address = Ops[0];
Craig Topper170de4b2017-12-21 23:50:22 +00009977 ConstantInt *C = cast<ConstantInt>(Ops[1]);
9978 Value *RW = ConstantInt::get(Int32Ty, (C->getZExtValue() >> 2) & 0x1);
9979 Value *Locality = ConstantInt::get(Int32Ty, C->getZExtValue() & 0x3);
Warren Hunt20e4a5d2014-02-21 23:08:53 +00009980 Value *Data = ConstantInt::get(Int32Ty, 1);
James Y Knight8799cae2019-02-03 21:53:49 +00009981 Function *F = CGM.getIntrinsic(Intrinsic::prefetch);
David Blaikie43f9bb72015-05-18 22:14:03 +00009982 return Builder.CreateCall(F, {Address, RW, Locality, Data});
Warren Hunt20e4a5d2014-02-21 23:08:53 +00009983 }
Albert Gutowski727ab8a2016-09-14 21:19:43 +00009984 case X86::BI_mm_clflush: {
9985 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse2_clflush),
9986 Ops[0]);
9987 }
9988 case X86::BI_mm_lfence: {
9989 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse2_lfence));
9990 }
9991 case X86::BI_mm_mfence: {
9992 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse2_mfence));
9993 }
9994 case X86::BI_mm_sfence: {
9995 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_sfence));
9996 }
9997 case X86::BI_mm_pause: {
9998 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse2_pause));
9999 }
10000 case X86::BI__rdtsc: {
10001 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_rdtsc));
10002 }
Craig Topperecf2e2f2018-09-07 19:14:24 +000010003 case X86::BI__builtin_ia32_rdtscp: {
10004 Value *Call = Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_rdtscp));
10005 Builder.CreateDefaultAlignedStore(Builder.CreateExtractValue(Call, 1),
10006 Ops[0]);
10007 return Builder.CreateExtractValue(Call, 0);
10008 }
Craig Topperfb5d9f22018-09-26 17:01:44 +000010009 case X86::BI__builtin_ia32_lzcnt_u16:
10010 case X86::BI__builtin_ia32_lzcnt_u32:
10011 case X86::BI__builtin_ia32_lzcnt_u64: {
James Y Knight8799cae2019-02-03 21:53:49 +000010012 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, Ops[0]->getType());
Craig Topperfb5d9f22018-09-26 17:01:44 +000010013 return Builder.CreateCall(F, {Ops[0], Builder.getInt1(false)});
10014 }
10015 case X86::BI__builtin_ia32_tzcnt_u16:
10016 case X86::BI__builtin_ia32_tzcnt_u32:
10017 case X86::BI__builtin_ia32_tzcnt_u64: {
James Y Knight8799cae2019-02-03 21:53:49 +000010018 Function *F = CGM.getIntrinsic(Intrinsic::cttz, Ops[0]->getType());
Craig Topperfb5d9f22018-09-26 17:01:44 +000010019 return Builder.CreateCall(F, {Ops[0], Builder.getInt1(false)});
10020 }
Simon Pilgrim5aba9922015-08-26 21:17:12 +000010021 case X86::BI__builtin_ia32_undef128:
10022 case X86::BI__builtin_ia32_undef256:
10023 case X86::BI__builtin_ia32_undef512:
Sanjay Patele795daa2017-03-12 19:15:10 +000010024 // The x86 definition of "undef" is not the same as the LLVM definition
10025 // (PR32176). We leave optimizing away an unnecessary zero constant to the
10026 // IR optimizer and backend.
10027 // TODO: If we had a "freeze" IR instruction to generate a fixed undef
10028 // value, we should use that here instead of a zero.
10029 return llvm::Constant::getNullValue(ConvertType(E->getType()));
Bill Wendling65b2a962010-10-09 08:47:25 +000010030 case X86::BI__builtin_ia32_vec_init_v8qi:
10031 case X86::BI__builtin_ia32_vec_init_v4hi:
10032 case X86::BI__builtin_ia32_vec_init_v2si:
10033 return Builder.CreateBitCast(BuildVector(Ops),
John McCallad7c5c12011-02-08 08:22:06 +000010034 llvm::Type::getX86_MMXTy(getLLVMContext()));
Argyrios Kyrtzidis073c9cb2010-10-10 03:19:11 +000010035 case X86::BI__builtin_ia32_vec_ext_v2si:
Craig Topperf3914b72018-06-06 00:24:55 +000010036 case X86::BI__builtin_ia32_vec_ext_v16qi:
10037 case X86::BI__builtin_ia32_vec_ext_v8hi:
10038 case X86::BI__builtin_ia32_vec_ext_v4si:
10039 case X86::BI__builtin_ia32_vec_ext_v4sf:
10040 case X86::BI__builtin_ia32_vec_ext_v2di:
10041 case X86::BI__builtin_ia32_vec_ext_v32qi:
10042 case X86::BI__builtin_ia32_vec_ext_v16hi:
10043 case X86::BI__builtin_ia32_vec_ext_v8si:
Craig Topper342b0952018-06-21 23:39:47 +000010044 case X86::BI__builtin_ia32_vec_ext_v4di: {
10045 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
10046 uint64_t Index = cast<ConstantInt>(Ops[1])->getZExtValue();
10047 Index &= NumElts - 1;
Craig Topperf3914b72018-06-06 00:24:55 +000010048 // These builtins exist so we can ensure the index is an ICE and in range.
10049 // Otherwise we could just do this in the header file.
Craig Topper342b0952018-06-21 23:39:47 +000010050 return Builder.CreateExtractElement(Ops[0], Index);
10051 }
Craig Topperf3914b72018-06-06 00:24:55 +000010052 case X86::BI__builtin_ia32_vec_set_v16qi:
10053 case X86::BI__builtin_ia32_vec_set_v8hi:
10054 case X86::BI__builtin_ia32_vec_set_v4si:
10055 case X86::BI__builtin_ia32_vec_set_v2di:
10056 case X86::BI__builtin_ia32_vec_set_v32qi:
10057 case X86::BI__builtin_ia32_vec_set_v16hi:
10058 case X86::BI__builtin_ia32_vec_set_v8si:
Craig Topper342b0952018-06-21 23:39:47 +000010059 case X86::BI__builtin_ia32_vec_set_v4di: {
10060 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
10061 unsigned Index = cast<ConstantInt>(Ops[2])->getZExtValue();
10062 Index &= NumElts - 1;
Craig Topperf3914b72018-06-06 00:24:55 +000010063 // These builtins exist so we can ensure the index is an ICE and in range.
10064 // Otherwise we could just do this in the header file.
Craig Topper342b0952018-06-21 23:39:47 +000010065 return Builder.CreateInsertElement(Ops[0], Ops[1], Index);
10066 }
Albert Gutowski727ab8a2016-09-14 21:19:43 +000010067 case X86::BI_mm_setcsr:
Nate Begeman91f40e32008-04-14 04:49:57 +000010068 case X86::BI__builtin_ia32_ldmxcsr: {
John McCall7f416cc2015-09-08 08:05:57 +000010069 Address Tmp = CreateMemTemp(E->getArg(0)->getType());
Nate Begeman91f40e32008-04-14 04:49:57 +000010070 Builder.CreateStore(Ops[0], Tmp);
10071 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_ldmxcsr),
John McCall7f416cc2015-09-08 08:05:57 +000010072 Builder.CreateBitCast(Tmp.getPointer(), Int8PtrTy));
Nate Begeman91f40e32008-04-14 04:49:57 +000010073 }
Albert Gutowski727ab8a2016-09-14 21:19:43 +000010074 case X86::BI_mm_getcsr:
Nate Begeman91f40e32008-04-14 04:49:57 +000010075 case X86::BI__builtin_ia32_stmxcsr: {
John McCall7f416cc2015-09-08 08:05:57 +000010076 Address Tmp = CreateMemTemp(E->getType());
Ted Kremenekc14efa72011-08-17 21:04:19 +000010077 Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_stmxcsr),
John McCall7f416cc2015-09-08 08:05:57 +000010078 Builder.CreateBitCast(Tmp.getPointer(), Int8PtrTy));
Nate Begeman91f40e32008-04-14 04:49:57 +000010079 return Builder.CreateLoad(Tmp, "stmxcsr");
10080 }
Amjad Aboud2b9b8a52015-10-13 12:29:35 +000010081 case X86::BI__builtin_ia32_xsave:
10082 case X86::BI__builtin_ia32_xsave64:
10083 case X86::BI__builtin_ia32_xrstor:
10084 case X86::BI__builtin_ia32_xrstor64:
10085 case X86::BI__builtin_ia32_xsaveopt:
10086 case X86::BI__builtin_ia32_xsaveopt64:
10087 case X86::BI__builtin_ia32_xrstors:
10088 case X86::BI__builtin_ia32_xrstors64:
10089 case X86::BI__builtin_ia32_xsavec:
10090 case X86::BI__builtin_ia32_xsavec64:
10091 case X86::BI__builtin_ia32_xsaves:
Craig Topper93177972019-01-16 22:56:25 +000010092 case X86::BI__builtin_ia32_xsaves64:
10093 case X86::BI__builtin_ia32_xsetbv:
10094 case X86::BI_xsetbv: {
Amjad Aboud2b9b8a52015-10-13 12:29:35 +000010095 Intrinsic::ID ID;
10096#define INTRINSIC_X86_XSAVE_ID(NAME) \
10097 case X86::BI__builtin_ia32_##NAME: \
10098 ID = Intrinsic::x86_##NAME; \
10099 break
10100 switch (BuiltinID) {
10101 default: llvm_unreachable("Unsupported intrinsic!");
10102 INTRINSIC_X86_XSAVE_ID(xsave);
10103 INTRINSIC_X86_XSAVE_ID(xsave64);
10104 INTRINSIC_X86_XSAVE_ID(xrstor);
10105 INTRINSIC_X86_XSAVE_ID(xrstor64);
10106 INTRINSIC_X86_XSAVE_ID(xsaveopt);
10107 INTRINSIC_X86_XSAVE_ID(xsaveopt64);
10108 INTRINSIC_X86_XSAVE_ID(xrstors);
10109 INTRINSIC_X86_XSAVE_ID(xrstors64);
10110 INTRINSIC_X86_XSAVE_ID(xsavec);
10111 INTRINSIC_X86_XSAVE_ID(xsavec64);
10112 INTRINSIC_X86_XSAVE_ID(xsaves);
10113 INTRINSIC_X86_XSAVE_ID(xsaves64);
Craig Topper93177972019-01-16 22:56:25 +000010114 INTRINSIC_X86_XSAVE_ID(xsetbv);
10115 case X86::BI_xsetbv:
10116 ID = Intrinsic::x86_xsetbv;
10117 break;
Amjad Aboud2b9b8a52015-10-13 12:29:35 +000010118 }
10119#undef INTRINSIC_X86_XSAVE_ID
10120 Value *Mhi = Builder.CreateTrunc(
10121 Builder.CreateLShr(Ops[1], ConstantInt::get(Int64Ty, 32)), Int32Ty);
10122 Value *Mlo = Builder.CreateTrunc(Ops[1], Int32Ty);
10123 Ops[1] = Mhi;
10124 Ops.push_back(Mlo);
10125 return Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
10126 }
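  // The split above follows the hardware convention for these instructions:
  // the 64-bit state mask is passed in EDX:EAX, so the high half replaces
  // Ops[1] and the low half is appended as an extra operand.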
Craig Topper93177972019-01-16 22:56:25 +000010127 case X86::BI__builtin_ia32_xgetbv:
10128 case X86::BI_xgetbv:
10129 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_xgetbv), Ops);
Craig Topper6e891fb2016-05-31 01:50:10 +000010130 case X86::BI__builtin_ia32_storedqudi128_mask:
10131 case X86::BI__builtin_ia32_storedqusi128_mask:
10132 case X86::BI__builtin_ia32_storedquhi128_mask:
10133 case X86::BI__builtin_ia32_storedquqi128_mask:
10134 case X86::BI__builtin_ia32_storeupd128_mask:
10135 case X86::BI__builtin_ia32_storeups128_mask:
10136 case X86::BI__builtin_ia32_storedqudi256_mask:
10137 case X86::BI__builtin_ia32_storedqusi256_mask:
10138 case X86::BI__builtin_ia32_storedquhi256_mask:
10139 case X86::BI__builtin_ia32_storedquqi256_mask:
10140 case X86::BI__builtin_ia32_storeupd256_mask:
10141 case X86::BI__builtin_ia32_storeups256_mask:
10142 case X86::BI__builtin_ia32_storedqudi512_mask:
10143 case X86::BI__builtin_ia32_storedqusi512_mask:
10144 case X86::BI__builtin_ia32_storedquhi512_mask:
10145 case X86::BI__builtin_ia32_storedquqi512_mask:
10146 case X86::BI__builtin_ia32_storeupd512_mask:
10147 case X86::BI__builtin_ia32_storeups512_mask:
10148 return EmitX86MaskedStore(*this, Ops, 1);
10149
Ayman Musae60a41c2016-11-08 12:00:30 +000010150 case X86::BI__builtin_ia32_storess128_mask:
10151 case X86::BI__builtin_ia32_storesd128_mask: {
Craig Topper74ac0ed2018-05-10 05:43:43 +000010152 return EmitX86MaskedStore(*this, Ops, 1);
Ayman Musae60a41c2016-11-08 12:00:30 +000010153 }
Coby Tayree22685762017-12-27 10:01:00 +000010154 case X86::BI__builtin_ia32_vpopcntb_128:
Craig Topperb846d1f2017-12-16 06:02:31 +000010155 case X86::BI__builtin_ia32_vpopcntd_128:
10156 case X86::BI__builtin_ia32_vpopcntq_128:
Coby Tayree22685762017-12-27 10:01:00 +000010157 case X86::BI__builtin_ia32_vpopcntw_128:
10158 case X86::BI__builtin_ia32_vpopcntb_256:
Craig Topperb846d1f2017-12-16 06:02:31 +000010159 case X86::BI__builtin_ia32_vpopcntd_256:
10160 case X86::BI__builtin_ia32_vpopcntq_256:
Coby Tayree22685762017-12-27 10:01:00 +000010161 case X86::BI__builtin_ia32_vpopcntw_256:
10162 case X86::BI__builtin_ia32_vpopcntb_512:
Oren Ben Simhon140c1fb2017-05-25 13:44:11 +000010163 case X86::BI__builtin_ia32_vpopcntd_512:
Coby Tayree22685762017-12-27 10:01:00 +000010164 case X86::BI__builtin_ia32_vpopcntq_512:
10165 case X86::BI__builtin_ia32_vpopcntw_512: {
Oren Ben Simhon140c1fb2017-05-25 13:44:11 +000010166 llvm::Type *ResultType = ConvertType(E->getType());
10167 llvm::Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ResultType);
10168 return Builder.CreateCall(F, Ops);
10169 }
Michael Zuckerman755a13d2017-04-04 13:29:53 +000010170 case X86::BI__builtin_ia32_cvtmask2b128:
10171 case X86::BI__builtin_ia32_cvtmask2b256:
10172 case X86::BI__builtin_ia32_cvtmask2b512:
10173 case X86::BI__builtin_ia32_cvtmask2w128:
10174 case X86::BI__builtin_ia32_cvtmask2w256:
10175 case X86::BI__builtin_ia32_cvtmask2w512:
10176 case X86::BI__builtin_ia32_cvtmask2d128:
10177 case X86::BI__builtin_ia32_cvtmask2d256:
10178 case X86::BI__builtin_ia32_cvtmask2d512:
10179 case X86::BI__builtin_ia32_cvtmask2q128:
10180 case X86::BI__builtin_ia32_cvtmask2q256:
10181 case X86::BI__builtin_ia32_cvtmask2q512:
10182 return EmitX86SExtMask(*this, Ops[0], ConvertType(E->getType()));
10183
Craig Topperde91dff2018-01-08 22:37:56 +000010184 case X86::BI__builtin_ia32_cvtb2mask128:
10185 case X86::BI__builtin_ia32_cvtb2mask256:
10186 case X86::BI__builtin_ia32_cvtb2mask512:
10187 case X86::BI__builtin_ia32_cvtw2mask128:
10188 case X86::BI__builtin_ia32_cvtw2mask256:
10189 case X86::BI__builtin_ia32_cvtw2mask512:
10190 case X86::BI__builtin_ia32_cvtd2mask128:
10191 case X86::BI__builtin_ia32_cvtd2mask256:
10192 case X86::BI__builtin_ia32_cvtd2mask512:
10193 case X86::BI__builtin_ia32_cvtq2mask128:
10194 case X86::BI__builtin_ia32_cvtq2mask256:
10195 case X86::BI__builtin_ia32_cvtq2mask512:
10196 return EmitX86ConvertToMask(*this, Ops[0]);
10197
Craig Topperbd7884e2019-01-26 02:42:01 +000010198 case X86::BI__builtin_ia32_cvtdq2ps512_mask:
10199 case X86::BI__builtin_ia32_cvtqq2ps512_mask:
10200 case X86::BI__builtin_ia32_cvtqq2pd512_mask:
10201 return EmitX86ConvertIntToFp(*this, Ops, /*IsSigned*/true);
10202 case X86::BI__builtin_ia32_cvtudq2ps512_mask:
10203 case X86::BI__builtin_ia32_cvtuqq2ps512_mask:
10204 case X86::BI__builtin_ia32_cvtuqq2pd512_mask:
10205 return EmitX86ConvertIntToFp(*this, Ops, /*IsSigned*/false);
10206
Gabor Buella70d8d512018-05-30 15:27:49 +000010207 case X86::BI__builtin_ia32_vfmaddss3:
Craig Topper8a8d7272018-07-08 01:10:47 +000010208 case X86::BI__builtin_ia32_vfmaddsd3:
10209 case X86::BI__builtin_ia32_vfmaddss3_mask:
10210 case X86::BI__builtin_ia32_vfmaddsd3_mask:
10211 return EmitScalarFMAExpr(*this, Ops, Ops[0]);
Craig Topperbe4c29332018-07-06 07:14:47 +000010212 case X86::BI__builtin_ia32_vfmaddss:
Craig Topper8a8d7272018-07-08 01:10:47 +000010213 case X86::BI__builtin_ia32_vfmaddsd:
10214 return EmitScalarFMAExpr(*this, Ops,
10215 Constant::getNullValue(Ops[0]->getType()));
10216 case X86::BI__builtin_ia32_vfmaddss3_maskz:
10217 case X86::BI__builtin_ia32_vfmaddsd3_maskz:
10218 return EmitScalarFMAExpr(*this, Ops, Ops[0], /*ZeroMask*/true);
10219 case X86::BI__builtin_ia32_vfmaddss3_mask3:
10220 case X86::BI__builtin_ia32_vfmaddsd3_mask3:
10221 return EmitScalarFMAExpr(*this, Ops, Ops[2], /*ZeroMask*/false, 2);
10222 case X86::BI__builtin_ia32_vfmsubss3_mask3:
10223 case X86::BI__builtin_ia32_vfmsubsd3_mask3:
10224 return EmitScalarFMAExpr(*this, Ops, Ops[2], /*ZeroMask*/false, 2,
10225 /*NegAcc*/true);
Gabor Buella70d8d512018-05-30 15:27:49 +000010226 case X86::BI__builtin_ia32_vfmaddps:
10227 case X86::BI__builtin_ia32_vfmaddpd:
10228 case X86::BI__builtin_ia32_vfmaddps256:
10229 case X86::BI__builtin_ia32_vfmaddpd256:
Craig Topperb92c77d2018-06-07 02:46:02 +000010230 case X86::BI__builtin_ia32_vfmaddps512_mask:
10231 case X86::BI__builtin_ia32_vfmaddps512_maskz:
10232 case X86::BI__builtin_ia32_vfmaddps512_mask3:
10233 case X86::BI__builtin_ia32_vfmsubps512_mask3:
10234 case X86::BI__builtin_ia32_vfmaddpd512_mask:
10235 case X86::BI__builtin_ia32_vfmaddpd512_maskz:
10236 case X86::BI__builtin_ia32_vfmaddpd512_mask3:
10237 case X86::BI__builtin_ia32_vfmsubpd512_mask3:
10238 return EmitX86FMAExpr(*this, Ops, BuiltinID, /*IsAddSub*/false);
Gabor Buella70d8d512018-05-30 15:27:49 +000010239 case X86::BI__builtin_ia32_vfmaddsubps:
10240 case X86::BI__builtin_ia32_vfmaddsubpd:
10241 case X86::BI__builtin_ia32_vfmaddsubps256:
10242 case X86::BI__builtin_ia32_vfmaddsubpd256:
Craig Topperb92c77d2018-06-07 02:46:02 +000010243 case X86::BI__builtin_ia32_vfmaddsubps512_mask:
10244 case X86::BI__builtin_ia32_vfmaddsubps512_maskz:
10245 case X86::BI__builtin_ia32_vfmaddsubps512_mask3:
10246 case X86::BI__builtin_ia32_vfmsubaddps512_mask3:
10247 case X86::BI__builtin_ia32_vfmaddsubpd512_mask:
10248 case X86::BI__builtin_ia32_vfmaddsubpd512_maskz:
10249 case X86::BI__builtin_ia32_vfmaddsubpd512_mask3:
10250 case X86::BI__builtin_ia32_vfmsubaddpd512_mask3:
10251 return EmitX86FMAExpr(*this, Ops, BuiltinID, /*IsAddSub*/true);
Gabor Buella70d8d512018-05-30 15:27:49 +000010252
Craig Topper6e891fb2016-05-31 01:50:10 +000010253 case X86::BI__builtin_ia32_movdqa32store128_mask:
10254 case X86::BI__builtin_ia32_movdqa64store128_mask:
10255 case X86::BI__builtin_ia32_storeaps128_mask:
10256 case X86::BI__builtin_ia32_storeapd128_mask:
10257 case X86::BI__builtin_ia32_movdqa32store256_mask:
10258 case X86::BI__builtin_ia32_movdqa64store256_mask:
10259 case X86::BI__builtin_ia32_storeaps256_mask:
10260 case X86::BI__builtin_ia32_storeapd256_mask:
10261 case X86::BI__builtin_ia32_movdqa32store512_mask:
10262 case X86::BI__builtin_ia32_movdqa64store512_mask:
10263 case X86::BI__builtin_ia32_storeaps512_mask:
Reid Kleckner89fbd552018-06-04 21:39:20 +000010264 case X86::BI__builtin_ia32_storeapd512_mask: {
10265 unsigned Align =
10266 getContext().getTypeAlignInChars(E->getArg(1)->getType()).getQuantity();
10267 return EmitX86MaskedStore(*this, Ops, Align);
10268 }
Craig Topper4b060e32016-05-31 06:58:07 +000010269 case X86::BI__builtin_ia32_loadups128_mask:
10270 case X86::BI__builtin_ia32_loadups256_mask:
10271 case X86::BI__builtin_ia32_loadups512_mask:
10272 case X86::BI__builtin_ia32_loadupd128_mask:
10273 case X86::BI__builtin_ia32_loadupd256_mask:
10274 case X86::BI__builtin_ia32_loadupd512_mask:
10275 case X86::BI__builtin_ia32_loaddquqi128_mask:
10276 case X86::BI__builtin_ia32_loaddquqi256_mask:
10277 case X86::BI__builtin_ia32_loaddquqi512_mask:
10278 case X86::BI__builtin_ia32_loaddquhi128_mask:
10279 case X86::BI__builtin_ia32_loaddquhi256_mask:
10280 case X86::BI__builtin_ia32_loaddquhi512_mask:
10281 case X86::BI__builtin_ia32_loaddqusi128_mask:
10282 case X86::BI__builtin_ia32_loaddqusi256_mask:
10283 case X86::BI__builtin_ia32_loaddqusi512_mask:
10284 case X86::BI__builtin_ia32_loaddqudi128_mask:
10285 case X86::BI__builtin_ia32_loaddqudi256_mask:
10286 case X86::BI__builtin_ia32_loaddqudi512_mask:
10287 return EmitX86MaskedLoad(*this, Ops, 1);
10288
Ayman Musae60a41c2016-11-08 12:00:30 +000010289 case X86::BI__builtin_ia32_loadss128_mask:
10290 case X86::BI__builtin_ia32_loadsd128_mask:
Reid Kleckner89fbd552018-06-04 21:39:20 +000010291 return EmitX86MaskedLoad(*this, Ops, 1);
John McCall280c6562018-06-01 21:34:26 +000010292
Reid Kleckner89fbd552018-06-04 21:39:20 +000010293 case X86::BI__builtin_ia32_loadaps128_mask:
John McCall280c6562018-06-01 21:34:26 +000010294 case X86::BI__builtin_ia32_loadaps256_mask:
John McCall280c6562018-06-01 21:34:26 +000010295 case X86::BI__builtin_ia32_loadaps512_mask:
Reid Kleckner89fbd552018-06-04 21:39:20 +000010296 case X86::BI__builtin_ia32_loadapd128_mask:
10297 case X86::BI__builtin_ia32_loadapd256_mask:
John McCall280c6562018-06-01 21:34:26 +000010298 case X86::BI__builtin_ia32_loadapd512_mask:
Reid Kleckner89fbd552018-06-04 21:39:20 +000010299 case X86::BI__builtin_ia32_movdqa32load128_mask:
10300 case X86::BI__builtin_ia32_movdqa32load256_mask:
John McCall280c6562018-06-01 21:34:26 +000010301 case X86::BI__builtin_ia32_movdqa32load512_mask:
Reid Kleckner89fbd552018-06-04 21:39:20 +000010302 case X86::BI__builtin_ia32_movdqa64load128_mask:
10303 case X86::BI__builtin_ia32_movdqa64load256_mask:
10304 case X86::BI__builtin_ia32_movdqa64load512_mask: {
10305 unsigned Align =
10306 getContext().getTypeAlignInChars(E->getArg(1)->getType()).getQuantity();
10307 return EmitX86MaskedLoad(*this, Ops, Align);
10308 }
Simon Pilgrim2d851732016-07-22 13:58:56 +000010309
Craig Topper3cce6a72018-06-10 17:27:05 +000010310 case X86::BI__builtin_ia32_expandloaddf128_mask:
10311 case X86::BI__builtin_ia32_expandloaddf256_mask:
10312 case X86::BI__builtin_ia32_expandloaddf512_mask:
10313 case X86::BI__builtin_ia32_expandloadsf128_mask:
10314 case X86::BI__builtin_ia32_expandloadsf256_mask:
10315 case X86::BI__builtin_ia32_expandloadsf512_mask:
10316 case X86::BI__builtin_ia32_expandloaddi128_mask:
10317 case X86::BI__builtin_ia32_expandloaddi256_mask:
10318 case X86::BI__builtin_ia32_expandloaddi512_mask:
10319 case X86::BI__builtin_ia32_expandloadsi128_mask:
10320 case X86::BI__builtin_ia32_expandloadsi256_mask:
10321 case X86::BI__builtin_ia32_expandloadsi512_mask:
10322 case X86::BI__builtin_ia32_expandloadhi128_mask:
10323 case X86::BI__builtin_ia32_expandloadhi256_mask:
10324 case X86::BI__builtin_ia32_expandloadhi512_mask:
10325 case X86::BI__builtin_ia32_expandloadqi128_mask:
10326 case X86::BI__builtin_ia32_expandloadqi256_mask:
10327 case X86::BI__builtin_ia32_expandloadqi512_mask:
10328 return EmitX86ExpandLoad(*this, Ops);
10329
10330 case X86::BI__builtin_ia32_compressstoredf128_mask:
10331 case X86::BI__builtin_ia32_compressstoredf256_mask:
10332 case X86::BI__builtin_ia32_compressstoredf512_mask:
10333 case X86::BI__builtin_ia32_compressstoresf128_mask:
10334 case X86::BI__builtin_ia32_compressstoresf256_mask:
10335 case X86::BI__builtin_ia32_compressstoresf512_mask:
10336 case X86::BI__builtin_ia32_compressstoredi128_mask:
10337 case X86::BI__builtin_ia32_compressstoredi256_mask:
10338 case X86::BI__builtin_ia32_compressstoredi512_mask:
10339 case X86::BI__builtin_ia32_compressstoresi128_mask:
10340 case X86::BI__builtin_ia32_compressstoresi256_mask:
10341 case X86::BI__builtin_ia32_compressstoresi512_mask:
10342 case X86::BI__builtin_ia32_compressstorehi128_mask:
10343 case X86::BI__builtin_ia32_compressstorehi256_mask:
10344 case X86::BI__builtin_ia32_compressstorehi512_mask:
10345 case X86::BI__builtin_ia32_compressstoreqi128_mask:
10346 case X86::BI__builtin_ia32_compressstoreqi256_mask:
10347 case X86::BI__builtin_ia32_compressstoreqi512_mask:
10348 return EmitX86CompressStore(*this, Ops);
10349
Craig Topper07b6d3d2019-01-28 07:03:10 +000010350 case X86::BI__builtin_ia32_expanddf128_mask:
10351 case X86::BI__builtin_ia32_expanddf256_mask:
10352 case X86::BI__builtin_ia32_expanddf512_mask:
10353 case X86::BI__builtin_ia32_expandsf128_mask:
10354 case X86::BI__builtin_ia32_expandsf256_mask:
10355 case X86::BI__builtin_ia32_expandsf512_mask:
10356 case X86::BI__builtin_ia32_expanddi128_mask:
10357 case X86::BI__builtin_ia32_expanddi256_mask:
10358 case X86::BI__builtin_ia32_expanddi512_mask:
10359 case X86::BI__builtin_ia32_expandsi128_mask:
10360 case X86::BI__builtin_ia32_expandsi256_mask:
10361 case X86::BI__builtin_ia32_expandsi512_mask:
10362 case X86::BI__builtin_ia32_expandhi128_mask:
10363 case X86::BI__builtin_ia32_expandhi256_mask:
10364 case X86::BI__builtin_ia32_expandhi512_mask:
10365 case X86::BI__builtin_ia32_expandqi128_mask:
10366 case X86::BI__builtin_ia32_expandqi256_mask:
10367 case X86::BI__builtin_ia32_expandqi512_mask:
10368 return EmitX86CompressExpand(*this, Ops, /*IsCompress*/false);
10369
10370 case X86::BI__builtin_ia32_compressdf128_mask:
10371 case X86::BI__builtin_ia32_compressdf256_mask:
10372 case X86::BI__builtin_ia32_compressdf512_mask:
10373 case X86::BI__builtin_ia32_compresssf128_mask:
10374 case X86::BI__builtin_ia32_compresssf256_mask:
10375 case X86::BI__builtin_ia32_compresssf512_mask:
10376 case X86::BI__builtin_ia32_compressdi128_mask:
10377 case X86::BI__builtin_ia32_compressdi256_mask:
10378 case X86::BI__builtin_ia32_compressdi512_mask:
10379 case X86::BI__builtin_ia32_compresssi128_mask:
10380 case X86::BI__builtin_ia32_compresssi256_mask:
10381 case X86::BI__builtin_ia32_compresssi512_mask:
10382 case X86::BI__builtin_ia32_compresshi128_mask:
10383 case X86::BI__builtin_ia32_compresshi256_mask:
10384 case X86::BI__builtin_ia32_compresshi512_mask:
10385 case X86::BI__builtin_ia32_compressqi128_mask:
10386 case X86::BI__builtin_ia32_compressqi256_mask:
10387 case X86::BI__builtin_ia32_compressqi512_mask:
10388 return EmitX86CompressExpand(*this, Ops, /*IsCompress*/true);
10389
Craig Topperbb5b0662019-01-16 22:34:33 +000010390 case X86::BI__builtin_ia32_gather3div2df:
10391 case X86::BI__builtin_ia32_gather3div2di:
10392 case X86::BI__builtin_ia32_gather3div4df:
10393 case X86::BI__builtin_ia32_gather3div4di:
10394 case X86::BI__builtin_ia32_gather3div4sf:
10395 case X86::BI__builtin_ia32_gather3div4si:
10396 case X86::BI__builtin_ia32_gather3div8sf:
10397 case X86::BI__builtin_ia32_gather3div8si:
10398 case X86::BI__builtin_ia32_gather3siv2df:
10399 case X86::BI__builtin_ia32_gather3siv2di:
10400 case X86::BI__builtin_ia32_gather3siv4df:
10401 case X86::BI__builtin_ia32_gather3siv4di:
10402 case X86::BI__builtin_ia32_gather3siv4sf:
10403 case X86::BI__builtin_ia32_gather3siv4si:
10404 case X86::BI__builtin_ia32_gather3siv8sf:
10405 case X86::BI__builtin_ia32_gather3siv8si:
10406 case X86::BI__builtin_ia32_gathersiv8df:
10407 case X86::BI__builtin_ia32_gathersiv16sf:
10408 case X86::BI__builtin_ia32_gatherdiv8df:
10409 case X86::BI__builtin_ia32_gatherdiv16sf:
10410 case X86::BI__builtin_ia32_gathersiv8di:
10411 case X86::BI__builtin_ia32_gathersiv16si:
10412 case X86::BI__builtin_ia32_gatherdiv8di:
10413 case X86::BI__builtin_ia32_gatherdiv16si: {
10414 Intrinsic::ID IID;
10415 switch (BuiltinID) {
10416 default: llvm_unreachable("Unexpected builtin");
10417 case X86::BI__builtin_ia32_gather3div2df:
10418 IID = Intrinsic::x86_avx512_mask_gather3div2_df;
10419 break;
10420 case X86::BI__builtin_ia32_gather3div2di:
10421 IID = Intrinsic::x86_avx512_mask_gather3div2_di;
10422 break;
10423 case X86::BI__builtin_ia32_gather3div4df:
10424 IID = Intrinsic::x86_avx512_mask_gather3div4_df;
10425 break;
10426 case X86::BI__builtin_ia32_gather3div4di:
10427 IID = Intrinsic::x86_avx512_mask_gather3div4_di;
10428 break;
10429 case X86::BI__builtin_ia32_gather3div4sf:
10430 IID = Intrinsic::x86_avx512_mask_gather3div4_sf;
10431 break;
10432 case X86::BI__builtin_ia32_gather3div4si:
10433 IID = Intrinsic::x86_avx512_mask_gather3div4_si;
10434 break;
10435 case X86::BI__builtin_ia32_gather3div8sf:
10436 IID = Intrinsic::x86_avx512_mask_gather3div8_sf;
10437 break;
10438 case X86::BI__builtin_ia32_gather3div8si:
10439 IID = Intrinsic::x86_avx512_mask_gather3div8_si;
10440 break;
10441 case X86::BI__builtin_ia32_gather3siv2df:
10442 IID = Intrinsic::x86_avx512_mask_gather3siv2_df;
10443 break;
10444 case X86::BI__builtin_ia32_gather3siv2di:
10445 IID = Intrinsic::x86_avx512_mask_gather3siv2_di;
10446 break;
10447 case X86::BI__builtin_ia32_gather3siv4df:
10448 IID = Intrinsic::x86_avx512_mask_gather3siv4_df;
10449 break;
10450 case X86::BI__builtin_ia32_gather3siv4di:
10451 IID = Intrinsic::x86_avx512_mask_gather3siv4_di;
10452 break;
10453 case X86::BI__builtin_ia32_gather3siv4sf:
10454 IID = Intrinsic::x86_avx512_mask_gather3siv4_sf;
10455 break;
10456 case X86::BI__builtin_ia32_gather3siv4si:
10457 IID = Intrinsic::x86_avx512_mask_gather3siv4_si;
10458 break;
10459 case X86::BI__builtin_ia32_gather3siv8sf:
10460 IID = Intrinsic::x86_avx512_mask_gather3siv8_sf;
10461 break;
10462 case X86::BI__builtin_ia32_gather3siv8si:
10463 IID = Intrinsic::x86_avx512_mask_gather3siv8_si;
10464 break;
10465 case X86::BI__builtin_ia32_gathersiv8df:
10466 IID = Intrinsic::x86_avx512_mask_gather_dpd_512;
10467 break;
10468 case X86::BI__builtin_ia32_gathersiv16sf:
10469 IID = Intrinsic::x86_avx512_mask_gather_dps_512;
10470 break;
10471 case X86::BI__builtin_ia32_gatherdiv8df:
10472 IID = Intrinsic::x86_avx512_mask_gather_qpd_512;
10473 break;
10474 case X86::BI__builtin_ia32_gatherdiv16sf:
10475 IID = Intrinsic::x86_avx512_mask_gather_qps_512;
10476 break;
10477 case X86::BI__builtin_ia32_gathersiv8di:
10478 IID = Intrinsic::x86_avx512_mask_gather_dpq_512;
10479 break;
10480 case X86::BI__builtin_ia32_gathersiv16si:
10481 IID = Intrinsic::x86_avx512_mask_gather_dpi_512;
10482 break;
10483 case X86::BI__builtin_ia32_gatherdiv8di:
10484 IID = Intrinsic::x86_avx512_mask_gather_qpq_512;
10485 break;
10486 case X86::BI__builtin_ia32_gatherdiv16si:
10487 IID = Intrinsic::x86_avx512_mask_gather_qpi_512;
10488 break;
10489 }
10490
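    // The intrinsic takes one mask bit per gathered element, i.e. the
    // narrower of the source/result and index vector widths, so trim the
    // mask to that width.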
10491 unsigned MinElts = std::min(Ops[0]->getType()->getVectorNumElements(),
10492 Ops[2]->getType()->getVectorNumElements());
10493 Ops[3] = getMaskVecValue(*this, Ops[3], MinElts);
10494 Function *Intr = CGM.getIntrinsic(IID);
10495 return Builder.CreateCall(Intr, Ops);
10496 }
10497
Craig Topper015585a2019-01-17 00:34:19 +000010498 case X86::BI__builtin_ia32_scattersiv8df:
10499 case X86::BI__builtin_ia32_scattersiv16sf:
10500 case X86::BI__builtin_ia32_scatterdiv8df:
10501 case X86::BI__builtin_ia32_scatterdiv16sf:
10502 case X86::BI__builtin_ia32_scattersiv8di:
10503 case X86::BI__builtin_ia32_scattersiv16si:
10504 case X86::BI__builtin_ia32_scatterdiv8di:
10505 case X86::BI__builtin_ia32_scatterdiv16si:
10506 case X86::BI__builtin_ia32_scatterdiv2df:
10507 case X86::BI__builtin_ia32_scatterdiv2di:
10508 case X86::BI__builtin_ia32_scatterdiv4df:
10509 case X86::BI__builtin_ia32_scatterdiv4di:
10510 case X86::BI__builtin_ia32_scatterdiv4sf:
10511 case X86::BI__builtin_ia32_scatterdiv4si:
10512 case X86::BI__builtin_ia32_scatterdiv8sf:
10513 case X86::BI__builtin_ia32_scatterdiv8si:
10514 case X86::BI__builtin_ia32_scattersiv2df:
10515 case X86::BI__builtin_ia32_scattersiv2di:
10516 case X86::BI__builtin_ia32_scattersiv4df:
10517 case X86::BI__builtin_ia32_scattersiv4di:
10518 case X86::BI__builtin_ia32_scattersiv4sf:
10519 case X86::BI__builtin_ia32_scattersiv4si:
10520 case X86::BI__builtin_ia32_scattersiv8sf:
10521 case X86::BI__builtin_ia32_scattersiv8si: {
10522 Intrinsic::ID IID;
10523 switch (BuiltinID) {
10524 default: llvm_unreachable("Unexpected builtin");
10525 case X86::BI__builtin_ia32_scattersiv8df:
10526 IID = Intrinsic::x86_avx512_mask_scatter_dpd_512;
10527 break;
10528 case X86::BI__builtin_ia32_scattersiv16sf:
10529 IID = Intrinsic::x86_avx512_mask_scatter_dps_512;
10530 break;
10531 case X86::BI__builtin_ia32_scatterdiv8df:
10532 IID = Intrinsic::x86_avx512_mask_scatter_qpd_512;
10533 break;
10534 case X86::BI__builtin_ia32_scatterdiv16sf:
10535 IID = Intrinsic::x86_avx512_mask_scatter_qps_512;
10536 break;
10537 case X86::BI__builtin_ia32_scattersiv8di:
10538 IID = Intrinsic::x86_avx512_mask_scatter_dpq_512;
10539 break;
10540 case X86::BI__builtin_ia32_scattersiv16si:
10541 IID = Intrinsic::x86_avx512_mask_scatter_dpi_512;
10542 break;
10543 case X86::BI__builtin_ia32_scatterdiv8di:
10544 IID = Intrinsic::x86_avx512_mask_scatter_qpq_512;
10545 break;
10546 case X86::BI__builtin_ia32_scatterdiv16si:
10547 IID = Intrinsic::x86_avx512_mask_scatter_qpi_512;
10548 break;
10549 case X86::BI__builtin_ia32_scatterdiv2df:
10550 IID = Intrinsic::x86_avx512_mask_scatterdiv2_df;
10551 break;
10552 case X86::BI__builtin_ia32_scatterdiv2di:
10553 IID = Intrinsic::x86_avx512_mask_scatterdiv2_di;
10554 break;
10555 case X86::BI__builtin_ia32_scatterdiv4df:
10556 IID = Intrinsic::x86_avx512_mask_scatterdiv4_df;
10557 break;
10558 case X86::BI__builtin_ia32_scatterdiv4di:
10559 IID = Intrinsic::x86_avx512_mask_scatterdiv4_di;
10560 break;
10561 case X86::BI__builtin_ia32_scatterdiv4sf:
10562 IID = Intrinsic::x86_avx512_mask_scatterdiv4_sf;
10563 break;
10564 case X86::BI__builtin_ia32_scatterdiv4si:
10565 IID = Intrinsic::x86_avx512_mask_scatterdiv4_si;
10566 break;
10567 case X86::BI__builtin_ia32_scatterdiv8sf:
10568 IID = Intrinsic::x86_avx512_mask_scatterdiv8_sf;
10569 break;
10570 case X86::BI__builtin_ia32_scatterdiv8si:
10571 IID = Intrinsic::x86_avx512_mask_scatterdiv8_si;
10572 break;
10573 case X86::BI__builtin_ia32_scattersiv2df:
10574 IID = Intrinsic::x86_avx512_mask_scattersiv2_df;
10575 break;
10576 case X86::BI__builtin_ia32_scattersiv2di:
10577 IID = Intrinsic::x86_avx512_mask_scattersiv2_di;
10578 break;
10579 case X86::BI__builtin_ia32_scattersiv4df:
10580 IID = Intrinsic::x86_avx512_mask_scattersiv4_df;
10581 break;
10582 case X86::BI__builtin_ia32_scattersiv4di:
10583 IID = Intrinsic::x86_avx512_mask_scattersiv4_di;
10584 break;
10585 case X86::BI__builtin_ia32_scattersiv4sf:
10586 IID = Intrinsic::x86_avx512_mask_scattersiv4_sf;
10587 break;
10588 case X86::BI__builtin_ia32_scattersiv4si:
10589 IID = Intrinsic::x86_avx512_mask_scattersiv4_si;
10590 break;
10591 case X86::BI__builtin_ia32_scattersiv8sf:
10592 IID = Intrinsic::x86_avx512_mask_scattersiv8_sf;
10593 break;
10594 case X86::BI__builtin_ia32_scattersiv8si:
10595 IID = Intrinsic::x86_avx512_mask_scattersiv8_si;
10596 break;
10597 }
10598
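    // As with the gathers above, trim the mask to the narrower of the index
    // and data vectors.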
10599 unsigned MinElts = std::min(Ops[2]->getType()->getVectorNumElements(),
10600 Ops[3]->getType()->getVectorNumElements());
10601 Ops[1] = getMaskVecValue(*this, Ops[1], MinElts);
10602 Function *Intr = CGM.getIntrinsic(IID);
10603 return Builder.CreateCall(Intr, Ops);
10604 }
10605
Nate Begeman91f40e32008-04-14 04:49:57 +000010606 case X86::BI__builtin_ia32_storehps:
10607 case X86::BI__builtin_ia32_storelps: {
Chris Lattner5e016ae2010-06-27 07:15:29 +000010608 llvm::Type *PtrTy = llvm::PointerType::getUnqual(Int64Ty);
10609 llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
Mike Stump11289f42009-09-09 15:08:12 +000010610
Nate Begeman91f40e32008-04-14 04:49:57 +000010611    // Cast the value to v2i64.
10612 Ops[1] = Builder.CreateBitCast(Ops[1], VecTy, "cast");
Mike Stump11289f42009-09-09 15:08:12 +000010613
Nate Begeman91f40e32008-04-14 04:49:57 +000010614    // Extract element 0 (storelps) or element 1 (storehps).
10615 unsigned Index = BuiltinID == X86::BI__builtin_ia32_storelps ? 0 : 1;
Craig Topper342b0952018-06-21 23:39:47 +000010616 Ops[1] = Builder.CreateExtractElement(Ops[1], Index, "extract");
Nate Begeman91f40e32008-04-14 04:49:57 +000010617
10618    // Cast the pointer to i64* and store.
10619 Ops[0] = Builder.CreateBitCast(Ops[0], PtrTy);
John McCall7f416cc2015-09-08 08:05:57 +000010620 return Builder.CreateDefaultAlignedStore(Ops[1], Ops[0]);
Nate Begeman91f40e32008-04-14 04:49:57 +000010621 }
Craig Topper3428bee2018-06-08 03:24:47 +000010622 case X86::BI__builtin_ia32_vextractf128_pd256:
10623 case X86::BI__builtin_ia32_vextractf128_ps256:
10624 case X86::BI__builtin_ia32_vextractf128_si256:
10625 case X86::BI__builtin_ia32_extract128i256:
Craig Topper5f50f3382018-06-08 21:50:07 +000010626 case X86::BI__builtin_ia32_extractf64x4_mask:
10627 case X86::BI__builtin_ia32_extractf32x4_mask:
10628 case X86::BI__builtin_ia32_extracti64x4_mask:
10629 case X86::BI__builtin_ia32_extracti32x4_mask:
10630 case X86::BI__builtin_ia32_extractf32x8_mask:
10631 case X86::BI__builtin_ia32_extracti32x8_mask:
10632 case X86::BI__builtin_ia32_extractf32x4_256_mask:
10633 case X86::BI__builtin_ia32_extracti32x4_256_mask:
10634 case X86::BI__builtin_ia32_extractf64x2_256_mask:
10635 case X86::BI__builtin_ia32_extracti64x2_256_mask:
10636 case X86::BI__builtin_ia32_extractf64x2_512_mask:
10637 case X86::BI__builtin_ia32_extracti64x2_512_mask: {
Craig Topper3428bee2018-06-08 03:24:47 +000010638 llvm::Type *DstTy = ConvertType(E->getType());
10639 unsigned NumElts = DstTy->getVectorNumElements();
Craig Topper342b0952018-06-21 23:39:47 +000010640 unsigned SrcNumElts = Ops[0]->getType()->getVectorNumElements();
10641 unsigned SubVectors = SrcNumElts / NumElts;
10642 unsigned Index = cast<ConstantInt>(Ops[1])->getZExtValue();
10643 assert(llvm::isPowerOf2_32(SubVectors) && "Expected power of 2 subvectors");
10644 Index &= SubVectors - 1; // Remove any extra bits.
10645 Index *= NumElts;
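    // e.g. extracting 128-bit subvector 1 from a 512-bit source selects
    // source elements [NumElts, 2*NumElts).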
Craig Topper3428bee2018-06-08 03:24:47 +000010646
10647 uint32_t Indices[16];
10648 for (unsigned i = 0; i != NumElts; ++i)
10649 Indices[i] = i + Index;
10650
Craig Topper5f50f3382018-06-08 21:50:07 +000010651 Value *Res = Builder.CreateShuffleVector(Ops[0],
10652 UndefValue::get(Ops[0]->getType()),
10653 makeArrayRef(Indices, NumElts),
10654 "extract");
10655
10656 if (Ops.size() == 4)
10657 Res = EmitX86Select(*this, Ops[3], Res, Ops[2]);
10658
10659 return Res;
Craig Topper3428bee2018-06-08 03:24:47 +000010660 }
10661 case X86::BI__builtin_ia32_vinsertf128_pd256:
10662 case X86::BI__builtin_ia32_vinsertf128_ps256:
10663 case X86::BI__builtin_ia32_vinsertf128_si256:
10664 case X86::BI__builtin_ia32_insert128i256:
10665 case X86::BI__builtin_ia32_insertf64x4:
10666 case X86::BI__builtin_ia32_insertf32x4:
10667 case X86::BI__builtin_ia32_inserti64x4:
10668 case X86::BI__builtin_ia32_inserti32x4:
10669 case X86::BI__builtin_ia32_insertf32x8:
10670 case X86::BI__builtin_ia32_inserti32x8:
10671 case X86::BI__builtin_ia32_insertf32x4_256:
10672 case X86::BI__builtin_ia32_inserti32x4_256:
10673 case X86::BI__builtin_ia32_insertf64x2_256:
10674 case X86::BI__builtin_ia32_inserti64x2_256:
10675 case X86::BI__builtin_ia32_insertf64x2_512:
10676 case X86::BI__builtin_ia32_inserti64x2_512: {
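    // Widen the subvector to the destination width with an undef-padded
    // shuffle, then splice it over the selected position in Ops[0] with a
    // second shuffle.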
10677 unsigned DstNumElts = Ops[0]->getType()->getVectorNumElements();
10678 unsigned SrcNumElts = Ops[1]->getType()->getVectorNumElements();
Craig Topper342b0952018-06-21 23:39:47 +000010679 unsigned SubVectors = DstNumElts / SrcNumElts;
10680 unsigned Index = cast<ConstantInt>(Ops[2])->getZExtValue();
10681 assert(llvm::isPowerOf2_32(SubVectors) && "Expected power of 2 subvectors");
10682 Index &= SubVectors - 1; // Remove any extra bits.
10683 Index *= SrcNumElts;
Craig Topper3428bee2018-06-08 03:24:47 +000010684
10685 uint32_t Indices[16];
10686 for (unsigned i = 0; i != DstNumElts; ++i)
10687 Indices[i] = (i >= SrcNumElts) ? SrcNumElts + (i % SrcNumElts) : i;
10688
10689 Value *Op1 = Builder.CreateShuffleVector(Ops[1],
10690 UndefValue::get(Ops[1]->getType()),
10691 makeArrayRef(Indices, DstNumElts),
10692 "widen");
10693
10694 for (unsigned i = 0; i != DstNumElts; ++i) {
10695 if (i >= Index && i < (Index + SrcNumElts))
10696 Indices[i] = (i - Index) + DstNumElts;
10697 else
10698 Indices[i] = i;
10699 }
10700
10701 return Builder.CreateShuffleVector(Ops[0], Op1,
10702 makeArrayRef(Indices, DstNumElts),
10703 "insert");
10704 }
Craig Topper88097d92018-06-08 21:50:08 +000010705 case X86::BI__builtin_ia32_pmovqd512_mask:
10706 case X86::BI__builtin_ia32_pmovwb512_mask: {
10707 Value *Res = Builder.CreateTrunc(Ops[0], Ops[1]->getType());
10708 return EmitX86Select(*this, Ops[2], Res, Ops[1]);
10709 }
10710 case X86::BI__builtin_ia32_pmovdb512_mask:
10711 case X86::BI__builtin_ia32_pmovdw512_mask:
10712 case X86::BI__builtin_ia32_pmovqw512_mask: {
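    // With an all-ones mask the truncating move is just a vector truncate;
    // otherwise fall back to the target-specific masked intrinsic.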
10713 if (const auto *C = dyn_cast<Constant>(Ops[2]))
10714 if (C->isAllOnesValue())
10715 return Builder.CreateTrunc(Ops[0], Ops[1]->getType());
10716
10717 Intrinsic::ID IID;
10718 switch (BuiltinID) {
10719 default: llvm_unreachable("Unsupported intrinsic!");
10720 case X86::BI__builtin_ia32_pmovdb512_mask:
10721 IID = Intrinsic::x86_avx512_mask_pmov_db_512;
10722 break;
10723 case X86::BI__builtin_ia32_pmovdw512_mask:
10724 IID = Intrinsic::x86_avx512_mask_pmov_dw_512;
10725 break;
10726 case X86::BI__builtin_ia32_pmovqw512_mask:
10727 IID = Intrinsic::x86_avx512_mask_pmov_qw_512;
10728 break;
10729 }
10730
10731 Function *Intr = CGM.getIntrinsic(IID);
10732 return Builder.CreateCall(Intr, Ops);
10733 }
Craig Topper7d17d722018-06-08 00:00:21 +000010734 case X86::BI__builtin_ia32_pblendw128:
10735 case X86::BI__builtin_ia32_blendpd:
10736 case X86::BI__builtin_ia32_blendps:
10737 case X86::BI__builtin_ia32_blendpd256:
10738 case X86::BI__builtin_ia32_blendps256:
10739 case X86::BI__builtin_ia32_pblendw256:
10740 case X86::BI__builtin_ia32_pblendd128:
10741 case X86::BI__builtin_ia32_pblendd256: {
10742 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
10743 unsigned Imm = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
10744
10745 uint32_t Indices[16];
10746 // If there are more than 8 elements, the immediate is used twice so make
10747 // sure we handle that.
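    // e.g. for 128-bit pblendw an immediate of 0xF0 takes elements 0-3 from
    // Ops[0] and elements 4-7 from Ops[1].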
10748 for (unsigned i = 0; i != NumElts; ++i)
10749 Indices[i] = ((Imm >> (i % 8)) & 0x1) ? NumElts + i : i;
10750
Craig Topper201b9dd2018-06-11 17:06:01 +000010751 return Builder.CreateShuffleVector(Ops[0], Ops[1],
Craig Topper7d17d722018-06-08 00:00:21 +000010752 makeArrayRef(Indices, NumElts),
10753 "blend");
10754 }
Craig Topper03de1662018-06-08 06:13:16 +000010755 case X86::BI__builtin_ia32_pshuflw:
10756 case X86::BI__builtin_ia32_pshuflw256:
10757 case X86::BI__builtin_ia32_pshuflw512: {
10758 uint32_t Imm = cast<llvm::ConstantInt>(Ops[1])->getZExtValue();
10759 llvm::Type *Ty = Ops[0]->getType();
10760 unsigned NumElts = Ty->getVectorNumElements();
10761
10762    // Splat the 8 bits of the immediate 4 times to help the loop wrap around.
10763 Imm = (Imm & 0xff) * 0x01010101;
10764
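    // e.g. an immediate of 0x1B reverses the low four words of each lane
    // (indices 3,2,1,0) and passes the high four through unchanged.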
10765 uint32_t Indices[32];
10766 for (unsigned l = 0; l != NumElts; l += 8) {
10767 for (unsigned i = 0; i != 4; ++i) {
10768 Indices[l + i] = l + (Imm & 3);
10769 Imm >>= 2;
10770 }
10771 for (unsigned i = 4; i != 8; ++i)
10772 Indices[l + i] = l + i;
10773 }
10774
10775 return Builder.CreateShuffleVector(Ops[0], UndefValue::get(Ty),
10776 makeArrayRef(Indices, NumElts),
10777 "pshuflw");
10778 }
10779 case X86::BI__builtin_ia32_pshufhw:
10780 case X86::BI__builtin_ia32_pshufhw256:
10781 case X86::BI__builtin_ia32_pshufhw512: {
10782 uint32_t Imm = cast<llvm::ConstantInt>(Ops[1])->getZExtValue();
10783 llvm::Type *Ty = Ops[0]->getType();
10784 unsigned NumElts = Ty->getVectorNumElements();
10785
10786 // Splat the 8-bits of immediate 4 times to help the loop wrap around.
10787 Imm = (Imm & 0xff) * 0x01010101;
10788
10789 uint32_t Indices[32];
10790 for (unsigned l = 0; l != NumElts; l += 8) {
10791 for (unsigned i = 0; i != 4; ++i)
10792 Indices[l + i] = l + i;
10793 for (unsigned i = 4; i != 8; ++i) {
10794 Indices[l + i] = l + 4 + (Imm & 3);
10795 Imm >>= 2;
10796 }
10797 }
10798
10799 return Builder.CreateShuffleVector(Ops[0], UndefValue::get(Ty),
10800 makeArrayRef(Indices, NumElts),
10801 "pshufhw");
10802 }
10803 case X86::BI__builtin_ia32_pshufd:
10804 case X86::BI__builtin_ia32_pshufd256:
10805 case X86::BI__builtin_ia32_pshufd512:
Craig Topperacf56012018-06-08 00:59:27 +000010806 case X86::BI__builtin_ia32_vpermilpd:
10807 case X86::BI__builtin_ia32_vpermilps:
10808 case X86::BI__builtin_ia32_vpermilpd256:
10809 case X86::BI__builtin_ia32_vpermilps256:
10810 case X86::BI__builtin_ia32_vpermilpd512:
10811 case X86::BI__builtin_ia32_vpermilps512: {
10812 uint32_t Imm = cast<llvm::ConstantInt>(Ops[1])->getZExtValue();
10813 llvm::Type *Ty = Ops[0]->getType();
10814 unsigned NumElts = Ty->getVectorNumElements();
10815 unsigned NumLanes = Ty->getPrimitiveSizeInBits() / 128;
10816 unsigned NumLaneElts = NumElts / NumLanes;
10817
10818    // Splat the 8 bits of the immediate 4 times to help the loop wrap around.
10819 Imm = (Imm & 0xff) * 0x01010101;
10820
10821 uint32_t Indices[16];
10822 for (unsigned l = 0; l != NumElts; l += NumLaneElts) {
10823 for (unsigned i = 0; i != NumLaneElts; ++i) {
10824 Indices[i + l] = (Imm % NumLaneElts) + l;
10825 Imm /= NumLaneElts;
10826 }
10827 }
10828
10829 return Builder.CreateShuffleVector(Ops[0], UndefValue::get(Ty),
10830 makeArrayRef(Indices, NumElts),
10831 "permil");
10832 }
Craig Topper422a1bb2018-06-08 07:18:33 +000010833 case X86::BI__builtin_ia32_shufpd:
10834 case X86::BI__builtin_ia32_shufpd256:
10835 case X86::BI__builtin_ia32_shufpd512:
10836 case X86::BI__builtin_ia32_shufps:
10837 case X86::BI__builtin_ia32_shufps256:
10838 case X86::BI__builtin_ia32_shufps512: {
10839 uint32_t Imm = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
10840 llvm::Type *Ty = Ops[0]->getType();
10841 unsigned NumElts = Ty->getVectorNumElements();
10842 unsigned NumLanes = Ty->getPrimitiveSizeInBits() / 128;
10843 unsigned NumLaneElts = NumElts / NumLanes;
10844
10845    // Splat the 8 bits of the immediate 4 times to help the loop wrap around.
10846 Imm = (Imm & 0xff) * 0x01010101;
10847
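    // e.g. for 128-bit shufps an immediate of 0x4E (0b01001110) yields
    // { a[2], a[3], b[0], b[1] }.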
10848 uint32_t Indices[16];
10849 for (unsigned l = 0; l != NumElts; l += NumLaneElts) {
10850 for (unsigned i = 0; i != NumLaneElts; ++i) {
10851 unsigned Index = Imm % NumLaneElts;
10852 Imm /= NumLaneElts;
10853 if (i >= (NumLaneElts / 2))
10854 Index += NumElts;
10855 Indices[l + i] = l + Index;
10856 }
10857 }
10858
10859 return Builder.CreateShuffleVector(Ops[0], Ops[1],
10860 makeArrayRef(Indices, NumElts),
10861 "shufp");
10862 }
Craig Topper03f4f042018-06-08 18:00:25 +000010863 case X86::BI__builtin_ia32_permdi256:
10864 case X86::BI__builtin_ia32_permdf256:
10865 case X86::BI__builtin_ia32_permdi512:
10866 case X86::BI__builtin_ia32_permdf512: {
10867 unsigned Imm = cast<llvm::ConstantInt>(Ops[1])->getZExtValue();
10868 llvm::Type *Ty = Ops[0]->getType();
10869 unsigned NumElts = Ty->getVectorNumElements();
10870
10871 // These intrinsics operate on 256-bit lanes of four 64-bit elements.
10872 uint32_t Indices[8];
10873 for (unsigned l = 0; l != NumElts; l += 4)
10874 for (unsigned i = 0; i != 4; ++i)
10875 Indices[l + i] = l + ((Imm >> (2 * i)) & 0x3);
10876
10877 return Builder.CreateShuffleVector(Ops[0], UndefValue::get(Ty),
10878 makeArrayRef(Indices, NumElts),
10879 "perm");
10880 }
Craig Topper480e2b62015-02-17 06:37:58 +000010881 case X86::BI__builtin_ia32_palignr128:
Craig Topperf51cc072016-06-06 06:13:01 +000010882 case X86::BI__builtin_ia32_palignr256:
Craig Topper8e3689c2018-05-22 20:48:24 +000010883 case X86::BI__builtin_ia32_palignr512: {
Craig Topper342b0952018-06-21 23:39:47 +000010884 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue() & 0xff;
Craig Topper94aba2c2011-12-19 07:03:25 +000010885
Craig Topperf2f1a092016-07-08 02:17:35 +000010886 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
Craig Topper480e2b62015-02-17 06:37:58 +000010887 assert(NumElts % 16 == 0);
Craig Topper480e2b62015-02-17 06:37:58 +000010888
Craig Topper480e2b62015-02-17 06:37:58 +000010889 // If palignr is shifting the pair of vectors more than the size of two
10890 // lanes, emit zero.
Craig Topperb8b4b7e2016-05-29 07:06:02 +000010891 if (ShiftVal >= 32)
Craig Topper480e2b62015-02-17 06:37:58 +000010892 return llvm::Constant::getNullValue(ConvertType(E->getType()));
Craig Topper94aba2c2011-12-19 07:03:25 +000010893
Craig Topper480e2b62015-02-17 06:37:58 +000010894 // If palignr is shifting the pair of input vectors more than one lane,
Craig Topper96f9a572015-02-17 07:18:01 +000010895 // but less than two lanes, convert to shifting in zeroes.
Craig Topperb8b4b7e2016-05-29 07:06:02 +000010896 if (ShiftVal > 16) {
10897 ShiftVal -= 16;
Benjamin Kramerb5960562015-07-20 15:31:17 +000010898 Ops[1] = Ops[0];
Craig Topper96f9a572015-02-17 07:18:01 +000010899 Ops[0] = llvm::Constant::getNullValue(Ops[0]->getType());
Craig Topper94aba2c2011-12-19 07:03:25 +000010900 }
10901
Craig Topperd1cb4ce2016-06-12 00:41:24 +000010902 uint32_t Indices[64];
Craig Topper96f9a572015-02-17 07:18:01 +000010903    // 256/512-bit palignr operates on 128-bit lanes, so build the shuffle mask
10903    // one lane at a time.
Craig Topperb8b4b7e2016-05-29 07:06:02 +000010904 for (unsigned l = 0; l != NumElts; l += 16) {
10905 for (unsigned i = 0; i != 16; ++i) {
Craig Topper96f9a572015-02-17 07:18:01 +000010906 unsigned Idx = ShiftVal + i;
Craig Topperb8b4b7e2016-05-29 07:06:02 +000010907 if (Idx >= 16)
10908 Idx += NumElts - 16; // End of lane, switch operand.
Benjamin Kramerc385a802015-07-28 15:40:11 +000010909 Indices[l + i] = Idx + l;
Craig Topper96f9a572015-02-17 07:18:01 +000010910 }
10911 }
10912
Craig Topper8e3689c2018-05-22 20:48:24 +000010913 return Builder.CreateShuffleVector(Ops[1], Ops[0],
10914 makeArrayRef(Indices, NumElts),
10915 "palignr");
Simon Pilgrim532de1c2016-06-13 10:05:19 +000010916 }
Craig Toppere56819e2018-06-07 21:27:41 +000010917 case X86::BI__builtin_ia32_alignd128:
10918 case X86::BI__builtin_ia32_alignd256:
10919 case X86::BI__builtin_ia32_alignd512:
10920 case X86::BI__builtin_ia32_alignq128:
10921 case X86::BI__builtin_ia32_alignq256:
10922 case X86::BI__builtin_ia32_alignq512: {
10923 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
Craig Topper342b0952018-06-21 23:39:47 +000010924 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue() & 0xff;
Craig Toppere56819e2018-06-07 21:27:41 +000010925
10926    // Mask the shift amount to the width of two vectors.
10927 ShiftVal &= (2 * NumElts) - 1;
10928
10929 uint32_t Indices[16];
10930 for (unsigned i = 0; i != NumElts; ++i)
10931 Indices[i] = i + ShiftVal;
10932
10933 return Builder.CreateShuffleVector(Ops[1], Ops[0],
10934 makeArrayRef(Indices, NumElts),
10935 "valign");
10936 }
Craig Topper93921362018-06-07 23:03:08 +000010937 case X86::BI__builtin_ia32_shuf_f32x4_256:
10938 case X86::BI__builtin_ia32_shuf_f64x2_256:
10939 case X86::BI__builtin_ia32_shuf_i32x4_256:
10940 case X86::BI__builtin_ia32_shuf_i64x2_256:
10941 case X86::BI__builtin_ia32_shuf_f32x4:
10942 case X86::BI__builtin_ia32_shuf_f64x2:
10943 case X86::BI__builtin_ia32_shuf_i32x4:
10944 case X86::BI__builtin_ia32_shuf_i64x2: {
10945 unsigned Imm = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
10946 llvm::Type *Ty = Ops[0]->getType();
10947 unsigned NumElts = Ty->getVectorNumElements();
10948 unsigned NumLanes = Ty->getPrimitiveSizeInBits() == 512 ? 4 : 2;
10949 unsigned NumLaneElts = NumElts / NumLanes;
10950
10951 uint32_t Indices[16];
10952 for (unsigned l = 0; l != NumElts; l += NumLaneElts) {
10953 unsigned Index = (Imm % NumLanes) * NumLaneElts;
10954 Imm /= NumLanes; // Discard the bits we just used.
10955 if (l >= (NumElts / 2))
10956 Index += NumElts; // Switch to other source.
10957 for (unsigned i = 0; i != NumLaneElts; ++i) {
10958 Indices[l + i] = Index + i;
10959 }
10960 }
10961
10962 return Builder.CreateShuffleVector(Ops[0], Ops[1],
10963 makeArrayRef(Indices, NumElts),
10964 "shuf");
10965 }
Simon Pilgrim532de1c2016-06-13 10:05:19 +000010966
Craig Topper8cd7b0c2017-09-15 23:00:59 +000010967 case X86::BI__builtin_ia32_vperm2f128_pd256:
10968 case X86::BI__builtin_ia32_vperm2f128_ps256:
10969 case X86::BI__builtin_ia32_vperm2f128_si256:
10970 case X86::BI__builtin_ia32_permti256: {
10971 unsigned Imm = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
10972 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
10973
10974 // This takes a very simple approach since there are two lanes and a
10975 // shuffle can have 2 inputs. So we reserve the first input for the first
10976 // lane and the second input for the second lane. This may result in
10977 // duplicate sources, but this can be dealt with in the backend.
10978
10979 Value *OutOps[2];
10980 uint32_t Indices[8];
10981 for (unsigned l = 0; l != 2; ++l) {
10982 // Determine the source for this lane.
10983 if (Imm & (1 << ((l * 4) + 3)))
10984 OutOps[l] = llvm::ConstantAggregateZero::get(Ops[0]->getType());
10985 else if (Imm & (1 << ((l * 4) + 1)))
10986 OutOps[l] = Ops[1];
10987 else
10988 OutOps[l] = Ops[0];
10989
10990 for (unsigned i = 0; i != NumElts/2; ++i) {
10991 // Start with ith element of the source for this lane.
10992 unsigned Idx = (l * NumElts) + i;
10993 // If bit 0 of the immediate half is set, switch to the high half of
10994 // the source.
10995 if (Imm & (1 << (l * 4)))
10996 Idx += NumElts/2;
10997 Indices[(l * (NumElts/2)) + i] = Idx;
10998 }
10999 }
11000
11001 return Builder.CreateShuffleVector(OutOps[0], OutOps[1],
11002 makeArrayRef(Indices, NumElts),
11003 "vperm");
11004 }
11005
Craig Topper31730ae2018-06-14 22:02:35 +000011006 case X86::BI__builtin_ia32_pslldqi128_byteshift:
11007 case X86::BI__builtin_ia32_pslldqi256_byteshift:
11008 case X86::BI__builtin_ia32_pslldqi512_byteshift: {
Craig Topper342b0952018-06-21 23:39:47 +000011009 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() & 0xff;
Craig Topperd3623152018-06-07 17:28:03 +000011010 llvm::Type *ResultType = Ops[0]->getType();
11011 // Builtin type is vXi64 so multiply by 8 to get bytes.
11012 unsigned NumElts = ResultType->getVectorNumElements() * 8;
11013
11014 // If pslldq is shifting the vector more than 15 bytes, emit zero.
11015 if (ShiftVal >= 16)
11016 return llvm::Constant::getNullValue(ResultType);
11017
11018 uint32_t Indices[64];
11019    // 256/512-bit pslldq operates on 128-bit lanes, so build the shuffle mask
11019    // one lane at a time.
11020 for (unsigned l = 0; l != NumElts; l += 16) {
11021 for (unsigned i = 0; i != 16; ++i) {
11022 unsigned Idx = NumElts + i - ShiftVal;
11023 if (Idx < NumElts) Idx -= NumElts - 16; // end of lane, switch operand.
11024 Indices[l + i] = Idx + l;
11025 }
11026 }
11027
11028 llvm::Type *VecTy = llvm::VectorType::get(Int8Ty, NumElts);
11029 Value *Cast = Builder.CreateBitCast(Ops[0], VecTy, "cast");
11030 Value *Zero = llvm::Constant::getNullValue(VecTy);
11031 Value *SV = Builder.CreateShuffleVector(Zero, Cast,
11032 makeArrayRef(Indices, NumElts),
11033 "pslldq");
11034 return Builder.CreateBitCast(SV, Ops[0]->getType(), "cast");
11035 }
Craig Topper31730ae2018-06-14 22:02:35 +000011036 case X86::BI__builtin_ia32_psrldqi128_byteshift:
11037 case X86::BI__builtin_ia32_psrldqi256_byteshift:
11038 case X86::BI__builtin_ia32_psrldqi512_byteshift: {
Craig Topper342b0952018-06-21 23:39:47 +000011039 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() & 0xff;
Craig Topperd3623152018-06-07 17:28:03 +000011040 llvm::Type *ResultType = Ops[0]->getType();
11041 // Builtin type is vXi64 so multiply by 8 to get bytes.
11042 unsigned NumElts = ResultType->getVectorNumElements() * 8;
11043
11044 // If psrldq is shifting the vector more than 15 bytes, emit zero.
11045 if (ShiftVal >= 16)
11046 return llvm::Constant::getNullValue(ResultType);
11047
11048 uint32_t Indices[64];
11049    // 256/512-bit psrldq operates on 128-bit lanes, so build the shuffle mask
11049    // one lane at a time.
11050 for (unsigned l = 0; l != NumElts; l += 16) {
11051 for (unsigned i = 0; i != 16; ++i) {
11052 unsigned Idx = i + ShiftVal;
11053 if (Idx >= 16) Idx += NumElts - 16; // end of lane, switch operand.
11054 Indices[l + i] = Idx + l;
11055 }
11056 }
11057
11058 llvm::Type *VecTy = llvm::VectorType::get(Int8Ty, NumElts);
11059 Value *Cast = Builder.CreateBitCast(Ops[0], VecTy, "cast");
11060 Value *Zero = llvm::Constant::getNullValue(VecTy);
11061 Value *SV = Builder.CreateShuffleVector(Cast, Zero,
11062 makeArrayRef(Indices, NumElts),
11063 "psrldq");
11064 return Builder.CreateBitCast(SV, ResultType, "cast");
11065 }
Craig Topper2aa8efc2018-08-31 18:22:52 +000011066 case X86::BI__builtin_ia32_kshiftliqi:
11067 case X86::BI__builtin_ia32_kshiftlihi:
11068 case X86::BI__builtin_ia32_kshiftlisi:
11069 case X86::BI__builtin_ia32_kshiftlidi: {
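    // Masks are modeled as <N x i1> vectors, so the shift becomes a shuffle
    // that pulls zero elements in from a zero vector.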
11070 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() & 0xff;
11071 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
11072
11073 if (ShiftVal >= NumElts)
11074 return llvm::Constant::getNullValue(Ops[0]->getType());
11075
11076 Value *In = getMaskVecValue(*this, Ops[0], NumElts);
11077
11078 uint32_t Indices[64];
11079 for (unsigned i = 0; i != NumElts; ++i)
11080 Indices[i] = NumElts + i - ShiftVal;
11081
11082 Value *Zero = llvm::Constant::getNullValue(In->getType());
11083 Value *SV = Builder.CreateShuffleVector(Zero, In,
11084 makeArrayRef(Indices, NumElts),
11085 "kshiftl");
11086 return Builder.CreateBitCast(SV, Ops[0]->getType());
11087 }
11088 case X86::BI__builtin_ia32_kshiftriqi:
11089 case X86::BI__builtin_ia32_kshiftrihi:
11090 case X86::BI__builtin_ia32_kshiftrisi:
11091 case X86::BI__builtin_ia32_kshiftridi: {
11092 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() & 0xff;
11093 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
11094
11095 if (ShiftVal >= NumElts)
11096 return llvm::Constant::getNullValue(Ops[0]->getType());
11097
11098 Value *In = getMaskVecValue(*this, Ops[0], NumElts);
11099
11100 uint32_t Indices[64];
11101 for (unsigned i = 0; i != NumElts; ++i)
11102 Indices[i] = i + ShiftVal;
11103
11104 Value *Zero = llvm::Constant::getNullValue(In->getType());
11105 Value *SV = Builder.CreateShuffleVector(In, Zero,
11106 makeArrayRef(Indices, NumElts),
11107 "kshiftr");
11108 return Builder.CreateBitCast(SV, Ops[0]->getType());
11109 }
Simon Pilgrim532de1c2016-06-13 10:05:19 +000011110 case X86::BI__builtin_ia32_movnti:
Simon Pilgrime47f2cd02016-11-11 14:38:34 +000011111 case X86::BI__builtin_ia32_movnti64:
Simon Pilgrimd39d0262016-06-17 14:28:16 +000011112 case X86::BI__builtin_ia32_movntsd:
11113 case X86::BI__builtin_ia32_movntss: {
11114 llvm::MDNode *Node = llvm::MDNode::get(
11115 getLLVMContext(), llvm::ConstantAsMetadata::get(Builder.getInt32(1)));
11116
Simon Pilgrime47f2cd02016-11-11 14:38:34 +000011117 Value *Ptr = Ops[0];
11118 Value *Src = Ops[1];
11119
Simon Pilgrimd39d0262016-06-17 14:28:16 +000011120    // Extract the 0th element of the source vector.
Simon Pilgrime47f2cd02016-11-11 14:38:34 +000011121 if (BuiltinID == X86::BI__builtin_ia32_movntsd ||
11122 BuiltinID == X86::BI__builtin_ia32_movntss)
11123 Src = Builder.CreateExtractElement(Src, (uint64_t)0, "extract");
Simon Pilgrimd39d0262016-06-17 14:28:16 +000011124
11125 // Convert the type of the pointer to a pointer to the stored type.
Simon Pilgrime47f2cd02016-11-11 14:38:34 +000011126 Value *BC = Builder.CreateBitCast(
11127 Ptr, llvm::PointerType::getUnqual(Src->getType()), "cast");
Simon Pilgrimd39d0262016-06-17 14:28:16 +000011128
11129 // Unaligned nontemporal store of the scalar value.
Simon Pilgrime47f2cd02016-11-11 14:38:34 +000011130 StoreInst *SI = Builder.CreateDefaultAlignedStore(Src, BC);
Simon Pilgrimd39d0262016-06-17 14:28:16 +000011131 SI->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);
11132 SI->setAlignment(1);
11133 return SI;
11134 }
Simon Pilgrim45973792018-12-20 19:01:13 +000011135  // Rotate is a special case of funnel shift - the first two arguments are the same.
11136 case X86::BI__builtin_ia32_vprotb:
11137 case X86::BI__builtin_ia32_vprotw:
11138 case X86::BI__builtin_ia32_vprotd:
11139 case X86::BI__builtin_ia32_vprotq:
11140 case X86::BI__builtin_ia32_vprotbi:
11141 case X86::BI__builtin_ia32_vprotwi:
11142 case X86::BI__builtin_ia32_vprotdi:
11143 case X86::BI__builtin_ia32_vprotqi:
11144 case X86::BI__builtin_ia32_prold128:
11145 case X86::BI__builtin_ia32_prold256:
11146 case X86::BI__builtin_ia32_prold512:
11147 case X86::BI__builtin_ia32_prolq128:
11148 case X86::BI__builtin_ia32_prolq256:
11149 case X86::BI__builtin_ia32_prolq512:
11150 case X86::BI__builtin_ia32_prolvd128:
11151 case X86::BI__builtin_ia32_prolvd256:
11152 case X86::BI__builtin_ia32_prolvd512:
11153 case X86::BI__builtin_ia32_prolvq128:
11154 case X86::BI__builtin_ia32_prolvq256:
11155 case X86::BI__builtin_ia32_prolvq512:
11156 return EmitX86FunnelShift(*this, Ops[0], Ops[0], Ops[1], false);
11157 case X86::BI__builtin_ia32_prord128:
11158 case X86::BI__builtin_ia32_prord256:
11159 case X86::BI__builtin_ia32_prord512:
11160 case X86::BI__builtin_ia32_prorq128:
11161 case X86::BI__builtin_ia32_prorq256:
11162 case X86::BI__builtin_ia32_prorq512:
11163 case X86::BI__builtin_ia32_prorvd128:
11164 case X86::BI__builtin_ia32_prorvd256:
11165 case X86::BI__builtin_ia32_prorvd512:
11166 case X86::BI__builtin_ia32_prorvq128:
11167 case X86::BI__builtin_ia32_prorvq256:
11168 case X86::BI__builtin_ia32_prorvq512:
11169 return EmitX86FunnelShift(*this, Ops[0], Ops[0], Ops[1], true);
Simon Pilgrim532de1c2016-06-13 10:05:19 +000011170 case X86::BI__builtin_ia32_selectb_128:
Igor Bregeraadb8762016-06-08 13:59:20 +000011171 case X86::BI__builtin_ia32_selectb_256:
11172 case X86::BI__builtin_ia32_selectb_512:
11173 case X86::BI__builtin_ia32_selectw_128:
11174 case X86::BI__builtin_ia32_selectw_256:
11175 case X86::BI__builtin_ia32_selectw_512:
11176 case X86::BI__builtin_ia32_selectd_128:
11177 case X86::BI__builtin_ia32_selectd_256:
11178 case X86::BI__builtin_ia32_selectd_512:
11179 case X86::BI__builtin_ia32_selectq_128:
11180 case X86::BI__builtin_ia32_selectq_256:
11181 case X86::BI__builtin_ia32_selectq_512:
11182 case X86::BI__builtin_ia32_selectps_128:
11183 case X86::BI__builtin_ia32_selectps_256:
11184 case X86::BI__builtin_ia32_selectps_512:
11185 case X86::BI__builtin_ia32_selectpd_128:
11186 case X86::BI__builtin_ia32_selectpd_256:
11187 case X86::BI__builtin_ia32_selectpd_512:
Craig Topperc1442972016-06-09 05:15:00 +000011188 return EmitX86Select(*this, Ops[0], Ops[1], Ops[2]);
Craig Topper638426f2018-07-10 00:37:25 +000011189 case X86::BI__builtin_ia32_selectss_128:
11190 case X86::BI__builtin_ia32_selectsd_128: {
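    // Only the low element participates: extract it from both sources,
    // select on the low mask bit, and reinsert the result into Ops[1].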
11191 Value *A = Builder.CreateExtractElement(Ops[1], (uint64_t)0);
11192 Value *B = Builder.CreateExtractElement(Ops[2], (uint64_t)0);
11193 A = EmitX86ScalarSelect(*this, Ops[0], A, B);
11194 return Builder.CreateInsertElement(Ops[1], A, (uint64_t)0);
11195 }
Craig Topperd1691c72016-06-22 04:47:58 +000011196 case X86::BI__builtin_ia32_cmpb128_mask:
11197 case X86::BI__builtin_ia32_cmpb256_mask:
11198 case X86::BI__builtin_ia32_cmpb512_mask:
11199 case X86::BI__builtin_ia32_cmpw128_mask:
11200 case X86::BI__builtin_ia32_cmpw256_mask:
11201 case X86::BI__builtin_ia32_cmpw512_mask:
11202 case X86::BI__builtin_ia32_cmpd128_mask:
11203 case X86::BI__builtin_ia32_cmpd256_mask:
11204 case X86::BI__builtin_ia32_cmpd512_mask:
11205 case X86::BI__builtin_ia32_cmpq128_mask:
11206 case X86::BI__builtin_ia32_cmpq256_mask:
11207 case X86::BI__builtin_ia32_cmpq512_mask: {
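    // Bits 2:0 of the immediate select the comparison predicate; the
    // signed/unsigned flavor is implied by the builtin itself.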
11208 unsigned CC = cast<llvm::ConstantInt>(Ops[2])->getZExtValue() & 0x7;
11209 return EmitX86MaskedCompare(*this, CC, true, Ops);
11210 }
11211 case X86::BI__builtin_ia32_ucmpb128_mask:
11212 case X86::BI__builtin_ia32_ucmpb256_mask:
11213 case X86::BI__builtin_ia32_ucmpb512_mask:
11214 case X86::BI__builtin_ia32_ucmpw128_mask:
11215 case X86::BI__builtin_ia32_ucmpw256_mask:
11216 case X86::BI__builtin_ia32_ucmpw512_mask:
11217 case X86::BI__builtin_ia32_ucmpd128_mask:
11218 case X86::BI__builtin_ia32_ucmpd256_mask:
11219 case X86::BI__builtin_ia32_ucmpd512_mask:
11220 case X86::BI__builtin_ia32_ucmpq128_mask:
11221 case X86::BI__builtin_ia32_ucmpq256_mask:
11222 case X86::BI__builtin_ia32_ucmpq512_mask: {
11223 unsigned CC = cast<llvm::ConstantInt>(Ops[2])->getZExtValue() & 0x7;
11224 return EmitX86MaskedCompare(*this, CC, false, Ops);
11225 }
Simon Pilgrima7bcd722019-01-20 16:40:33 +000011226 case X86::BI__builtin_ia32_vpcomb:
11227 case X86::BI__builtin_ia32_vpcomw:
11228 case X86::BI__builtin_ia32_vpcomd:
11229 case X86::BI__builtin_ia32_vpcomq:
11230 return EmitX86vpcom(*this, Ops, true);
11231 case X86::BI__builtin_ia32_vpcomub:
11232 case X86::BI__builtin_ia32_vpcomuw:
11233 case X86::BI__builtin_ia32_vpcomud:
11234 case X86::BI__builtin_ia32_vpcomuq:
11235 return EmitX86vpcom(*this, Ops, false);
Sanjay Patel7495ec02016-06-15 17:18:50 +000011236
Craig Toppercb5fd56c2018-08-28 06:28:25 +000011237 case X86::BI__builtin_ia32_kortestcqi:
Craig Topperc0b2e982018-02-08 20:16:17 +000011238 case X86::BI__builtin_ia32_kortestchi:
Craig Toppercb5fd56c2018-08-28 06:28:25 +000011239 case X86::BI__builtin_ia32_kortestcsi:
11240 case X86::BI__builtin_ia32_kortestcdi: {
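    // kortestc is true when the OR of the two masks is all ones; model it as
    // an integer compare of the OR against an all-ones value (kortestz below
    // compares against zero).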
Craig Topperc330ca82018-08-27 06:20:22 +000011241 Value *Or = EmitX86MaskLogic(*this, Instruction::Or, Ops);
Craig Toppercb5fd56c2018-08-28 06:28:25 +000011242 Value *C = llvm::Constant::getAllOnesValue(Ops[0]->getType());
11243 Value *Cmp = Builder.CreateICmpEQ(Or, C);
11244 return Builder.CreateZExt(Cmp, ConvertType(E->getType()));
11245 }
11246 case X86::BI__builtin_ia32_kortestzqi:
11247 case X86::BI__builtin_ia32_kortestzhi:
11248 case X86::BI__builtin_ia32_kortestzsi:
11249 case X86::BI__builtin_ia32_kortestzdi: {
11250 Value *Or = EmitX86MaskLogic(*this, Instruction::Or, Ops);
11251 Value *C = llvm::Constant::getNullValue(Ops[0]->getType());
Craig Topperc0b2e982018-02-08 20:16:17 +000011252 Value *Cmp = Builder.CreateICmpEQ(Or, C);
11253 return Builder.CreateZExt(Cmp, ConvertType(E->getType()));
11254 }
11255
Craig Topperd88f76a2018-08-31 22:29:56 +000011256 case X86::BI__builtin_ia32_ktestcqi:
11257 case X86::BI__builtin_ia32_ktestzqi:
11258 case X86::BI__builtin_ia32_ktestchi:
11259 case X86::BI__builtin_ia32_ktestzhi:
11260 case X86::BI__builtin_ia32_ktestcsi:
11261 case X86::BI__builtin_ia32_ktestzsi:
11262 case X86::BI__builtin_ia32_ktestcdi:
11263 case X86::BI__builtin_ia32_ktestzdi: {
11264 Intrinsic::ID IID;
11265 switch (BuiltinID) {
11266 default: llvm_unreachable("Unsupported intrinsic!");
11267 case X86::BI__builtin_ia32_ktestcqi:
11268 IID = Intrinsic::x86_avx512_ktestc_b;
11269 break;
11270 case X86::BI__builtin_ia32_ktestzqi:
11271 IID = Intrinsic::x86_avx512_ktestz_b;
11272 break;
11273 case X86::BI__builtin_ia32_ktestchi:
11274 IID = Intrinsic::x86_avx512_ktestc_w;
11275 break;
11276 case X86::BI__builtin_ia32_ktestzhi:
11277 IID = Intrinsic::x86_avx512_ktestz_w;
11278 break;
11279 case X86::BI__builtin_ia32_ktestcsi:
11280 IID = Intrinsic::x86_avx512_ktestc_d;
11281 break;
11282 case X86::BI__builtin_ia32_ktestzsi:
11283 IID = Intrinsic::x86_avx512_ktestz_d;
11284 break;
11285 case X86::BI__builtin_ia32_ktestcdi:
11286 IID = Intrinsic::x86_avx512_ktestc_q;
11287 break;
11288 case X86::BI__builtin_ia32_ktestzdi:
11289 IID = Intrinsic::x86_avx512_ktestz_q;
11290 break;
11291 }
11292
11293 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
11294 Value *LHS = getMaskVecValue(*this, Ops[0], NumElts);
11295 Value *RHS = getMaskVecValue(*this, Ops[1], NumElts);
11296 Function *Intr = CGM.getIntrinsic(IID);
11297 return Builder.CreateCall(Intr, {LHS, RHS});
11298 }
11299
Craig Toppera65bf652018-08-28 22:32:14 +000011300 case X86::BI__builtin_ia32_kaddqi:
11301 case X86::BI__builtin_ia32_kaddhi:
11302 case X86::BI__builtin_ia32_kaddsi:
11303 case X86::BI__builtin_ia32_kadddi: {
11304 Intrinsic::ID IID;
11305 switch (BuiltinID) {
11306 default: llvm_unreachable("Unsupported intrinsic!");
11307 case X86::BI__builtin_ia32_kaddqi:
11308 IID = Intrinsic::x86_avx512_kadd_b;
11309 break;
11310 case X86::BI__builtin_ia32_kaddhi:
11311 IID = Intrinsic::x86_avx512_kadd_w;
11312 break;
11313 case X86::BI__builtin_ia32_kaddsi:
11314 IID = Intrinsic::x86_avx512_kadd_d;
11315 break;
11316 case X86::BI__builtin_ia32_kadddi:
11317 IID = Intrinsic::x86_avx512_kadd_q;
11318 break;
11319 }
11320
11321 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
11322 Value *LHS = getMaskVecValue(*this, Ops[0], NumElts);
11323 Value *RHS = getMaskVecValue(*this, Ops[1], NumElts);
11324 Function *Intr = CGM.getIntrinsic(IID);
11325 Value *Res = Builder.CreateCall(Intr, {LHS, RHS});
11326 return Builder.CreateBitCast(Res, Ops[0]->getType());
11327 }
Craig Topperc330ca82018-08-27 06:20:22 +000011328 case X86::BI__builtin_ia32_kandqi:
Craig Topper5028ace2017-12-16 08:26:22 +000011329 case X86::BI__builtin_ia32_kandhi:
Craig Topperc330ca82018-08-27 06:20:22 +000011330 case X86::BI__builtin_ia32_kandsi:
11331 case X86::BI__builtin_ia32_kanddi:
11332 return EmitX86MaskLogic(*this, Instruction::And, Ops);
11333 case X86::BI__builtin_ia32_kandnqi:
Craig Topper5028ace2017-12-16 08:26:22 +000011334 case X86::BI__builtin_ia32_kandnhi:
Craig Topperc330ca82018-08-27 06:20:22 +000011335 case X86::BI__builtin_ia32_kandnsi:
11336 case X86::BI__builtin_ia32_kandndi:
11337 return EmitX86MaskLogic(*this, Instruction::And, Ops, true);
11338 case X86::BI__builtin_ia32_korqi:
Craig Topper5028ace2017-12-16 08:26:22 +000011339 case X86::BI__builtin_ia32_korhi:
Craig Topperc330ca82018-08-27 06:20:22 +000011340 case X86::BI__builtin_ia32_korsi:
11341 case X86::BI__builtin_ia32_kordi:
11342 return EmitX86MaskLogic(*this, Instruction::Or, Ops);
11343 case X86::BI__builtin_ia32_kxnorqi:
Craig Topper5028ace2017-12-16 08:26:22 +000011344 case X86::BI__builtin_ia32_kxnorhi:
Craig Topperc330ca82018-08-27 06:20:22 +000011345 case X86::BI__builtin_ia32_kxnorsi:
11346 case X86::BI__builtin_ia32_kxnordi:
11347 return EmitX86MaskLogic(*this, Instruction::Xor, Ops, true);
11348 case X86::BI__builtin_ia32_kxorqi:
Craig Topper5028ace2017-12-16 08:26:22 +000011349 case X86::BI__builtin_ia32_kxorhi:
Craig Topperc330ca82018-08-27 06:20:22 +000011350 case X86::BI__builtin_ia32_kxorsi:
11351 case X86::BI__builtin_ia32_kxordi:
11352 return EmitX86MaskLogic(*this, Instruction::Xor, Ops);
11353 case X86::BI__builtin_ia32_knotqi:
11354 case X86::BI__builtin_ia32_knothi:
11355 case X86::BI__builtin_ia32_knotsi:
11356 case X86::BI__builtin_ia32_knotdi: {
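// knot: treat the integer mask as a vXi1 vector, invert every bit, and
// bitcast back.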
11357 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
11358 Value *Res = getMaskVecValue(*this, Ops[0], NumElts);
11359 return Builder.CreateBitCast(Builder.CreateNot(Res),
11360 Ops[0]->getType());
Craig Topper5028ace2017-12-16 08:26:22 +000011361 }
Craig Topper42a4d082018-08-31 20:41:06 +000011362 case X86::BI__builtin_ia32_kmovb:
11363 case X86::BI__builtin_ia32_kmovw:
11364 case X86::BI__builtin_ia32_kmovd:
11365 case X86::BI__builtin_ia32_kmovq: {
11366 // Bitcast to vXi1 type and then back to integer. This gets the mask
11367 // register type into the IR, but might be optimized out depending on
11368 // what's around it.
11369 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
11370 Value *Res = getMaskVecValue(*this, Ops[0], NumElts);
11371 return Builder.CreateBitCast(Res, Ops[0]->getType());
11372 }
Craig Topper5028ace2017-12-16 08:26:22 +000011373
Craig Topperf517f1a2018-01-14 19:23:50 +000011374 case X86::BI__builtin_ia32_kunpckdi:
11375 case X86::BI__builtin_ia32_kunpcksi:
11376 case X86::BI__builtin_ia32_kunpckhi: {
Craig Topperc330ca82018-08-27 06:20:22 +000011377 unsigned NumElts = Ops[0]->getType()->getIntegerBitWidth();
Craig Topperf517f1a2018-01-14 19:23:50 +000011378 Value *LHS = getMaskVecValue(*this, Ops[0], NumElts);
11379 Value *RHS = getMaskVecValue(*this, Ops[1], NumElts);
11380 uint32_t Indices[64];
11381 for (unsigned i = 0; i != NumElts; ++i)
11382 Indices[i] = i;
11383
11384 // First extract half of each vector. This gives better codegen than
11385 // doing it in a single shuffle.
11386 LHS = Builder.CreateShuffleVector(LHS, LHS,
11387 makeArrayRef(Indices, NumElts / 2));
11388 RHS = Builder.CreateShuffleVector(RHS, RHS,
11389 makeArrayRef(Indices, NumElts / 2));
11390 // Concat the vectors.
Craig Topperebb08382018-02-12 22:38:52 +000011391 // NOTE: Operands are swapped to match the intrinsic definition.
11392 Value *Res = Builder.CreateShuffleVector(RHS, LHS,
Craig Topperf517f1a2018-01-14 19:23:50 +000011393 makeArrayRef(Indices, NumElts));
11394 return Builder.CreateBitCast(Res, Ops[0]->getType());
11395 }
11396
Craig Topper8e3689c2018-05-22 20:48:24 +000011397 case X86::BI__builtin_ia32_vplzcntd_128:
11398 case X86::BI__builtin_ia32_vplzcntd_256:
11399 case X86::BI__builtin_ia32_vplzcntd_512:
11400 case X86::BI__builtin_ia32_vplzcntq_128:
11401 case X86::BI__builtin_ia32_vplzcntq_256:
11402 case X86::BI__builtin_ia32_vplzcntq_512: {
Craig Topper46e75552016-07-06 04:24:29 +000011403 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, Ops[0]->getType());
Craig Topper8e3689c2018-05-22 20:48:24 +000011404 return Builder.CreateCall(F, {Ops[0],Builder.getInt1(false)});
Craig Topper46e75552016-07-06 04:24:29 +000011405 }
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011406 case X86::BI__builtin_ia32_sqrtss:
11407 case X86::BI__builtin_ia32_sqrtsd: {
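// Scalar sqrt: apply llvm.sqrt to element 0 only and reinsert it, leaving
// the upper elements of the source operand unchanged.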
11408 Value *A = Builder.CreateExtractElement(Ops[0], (uint64_t)0);
11409 Function *F = CGM.getIntrinsic(Intrinsic::sqrt, A->getType());
11410 A = Builder.CreateCall(F, {A});
Fangrui Song6907ce22018-07-30 19:24:48 +000011411 return Builder.CreateInsertElement(Ops[0], A, (uint64_t)0);
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011412 }
11413 case X86::BI__builtin_ia32_sqrtsd_round_mask:
11414 case X86::BI__builtin_ia32_sqrtss_round_mask: {
11415 unsigned CC = cast<llvm::ConstantInt>(Ops[4])->getZExtValue();
 11416 // Lower to a plain sqrt only if the rounding mode is 4 (AKA CUR_DIRECTION);
 11417 // otherwise keep the target intrinsic.
11418 if (CC != 4) {
11419 Intrinsic::ID IID = BuiltinID == X86::BI__builtin_ia32_sqrtsd_round_mask ?
11420 Intrinsic::x86_avx512_mask_sqrt_sd :
11421 Intrinsic::x86_avx512_mask_sqrt_ss;
11422 return Builder.CreateCall(CGM.getIntrinsic(IID), Ops);
11423 }
Tomasz Krupa83ba6fa2018-06-18 17:57:05 +000011424 Value *A = Builder.CreateExtractElement(Ops[1], (uint64_t)0);
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011425 Function *F = CGM.getIntrinsic(Intrinsic::sqrt, A->getType());
Craig Topperf89f62a2018-07-06 22:46:52 +000011426 A = Builder.CreateCall(F, A);
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011427 Value *Src = Builder.CreateExtractElement(Ops[2], (uint64_t)0);
Craig Topperf89f62a2018-07-06 22:46:52 +000011428 A = EmitX86ScalarSelect(*this, Ops[3], A, Src);
Tomasz Krupa83ba6fa2018-06-18 17:57:05 +000011429 return Builder.CreateInsertElement(Ops[0], A, (uint64_t)0);
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011430 }
11431 case X86::BI__builtin_ia32_sqrtpd256:
11432 case X86::BI__builtin_ia32_sqrtpd:
11433 case X86::BI__builtin_ia32_sqrtps256:
Craig Topper8bf793f2018-06-29 05:43:33 +000011434 case X86::BI__builtin_ia32_sqrtps:
11435 case X86::BI__builtin_ia32_sqrtps512:
11436 case X86::BI__builtin_ia32_sqrtpd512: {
11437 if (Ops.size() == 2) {
11438 unsigned CC = cast<llvm::ConstantInt>(Ops[1])->getZExtValue();
 11439 // Lower to a plain sqrt only if the rounding mode is 4 (AKA CUR_DIRECTION);
 11440 // otherwise keep the target intrinsic.
11441 if (CC != 4) {
11442 Intrinsic::ID IID = BuiltinID == X86::BI__builtin_ia32_sqrtps512 ?
11443 Intrinsic::x86_avx512_sqrt_ps_512 :
11444 Intrinsic::x86_avx512_sqrt_pd_512;
11445 return Builder.CreateCall(CGM.getIntrinsic(IID), Ops);
11446 }
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011447 }
11448 Function *F = CGM.getIntrinsic(Intrinsic::sqrt, Ops[0]->getType());
Craig Topper8bf793f2018-06-29 05:43:33 +000011449 return Builder.CreateCall(F, Ops[0]);
Tomasz Krupaf1792bb2018-06-15 18:05:59 +000011450 }
Uriel Korach3fba3c32017-09-13 09:02:02 +000011451 case X86::BI__builtin_ia32_pabsb128:
11452 case X86::BI__builtin_ia32_pabsw128:
11453 case X86::BI__builtin_ia32_pabsd128:
11454 case X86::BI__builtin_ia32_pabsb256:
11455 case X86::BI__builtin_ia32_pabsw256:
11456 case X86::BI__builtin_ia32_pabsd256:
Craig Topperf2043b02018-05-23 04:51:54 +000011457 case X86::BI__builtin_ia32_pabsq128:
11458 case X86::BI__builtin_ia32_pabsq256:
11459 case X86::BI__builtin_ia32_pabsb512:
11460 case X86::BI__builtin_ia32_pabsw512:
11461 case X86::BI__builtin_ia32_pabsd512:
11462 case X86::BI__builtin_ia32_pabsq512:
Uriel Korach3fba3c32017-09-13 09:02:02 +000011463 return EmitX86Abs(*this, Ops);
11464
Sanjay Patel7495ec02016-06-15 17:18:50 +000011465 case X86::BI__builtin_ia32_pmaxsb128:
11466 case X86::BI__builtin_ia32_pmaxsw128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011467 case X86::BI__builtin_ia32_pmaxsd128:
Craig Topperf2043b02018-05-23 04:51:54 +000011468 case X86::BI__builtin_ia32_pmaxsq128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011469 case X86::BI__builtin_ia32_pmaxsb256:
11470 case X86::BI__builtin_ia32_pmaxsw256:
Craig Topper531ce282016-10-24 04:04:24 +000011471 case X86::BI__builtin_ia32_pmaxsd256:
Craig Topperf2043b02018-05-23 04:51:54 +000011472 case X86::BI__builtin_ia32_pmaxsq256:
11473 case X86::BI__builtin_ia32_pmaxsb512:
11474 case X86::BI__builtin_ia32_pmaxsw512:
11475 case X86::BI__builtin_ia32_pmaxsd512:
11476 case X86::BI__builtin_ia32_pmaxsq512:
Craig Topper531ce282016-10-24 04:04:24 +000011477 return EmitX86MinMax(*this, ICmpInst::ICMP_SGT, Ops);
Sanjay Patel7495ec02016-06-15 17:18:50 +000011478 case X86::BI__builtin_ia32_pmaxub128:
11479 case X86::BI__builtin_ia32_pmaxuw128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011480 case X86::BI__builtin_ia32_pmaxud128:
Craig Topperf2043b02018-05-23 04:51:54 +000011481 case X86::BI__builtin_ia32_pmaxuq128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011482 case X86::BI__builtin_ia32_pmaxub256:
11483 case X86::BI__builtin_ia32_pmaxuw256:
Craig Topper531ce282016-10-24 04:04:24 +000011484 case X86::BI__builtin_ia32_pmaxud256:
Craig Topperf2043b02018-05-23 04:51:54 +000011485 case X86::BI__builtin_ia32_pmaxuq256:
11486 case X86::BI__builtin_ia32_pmaxub512:
11487 case X86::BI__builtin_ia32_pmaxuw512:
11488 case X86::BI__builtin_ia32_pmaxud512:
11489 case X86::BI__builtin_ia32_pmaxuq512:
Craig Topper531ce282016-10-24 04:04:24 +000011490 return EmitX86MinMax(*this, ICmpInst::ICMP_UGT, Ops);
Sanjay Patel7495ec02016-06-15 17:18:50 +000011491 case X86::BI__builtin_ia32_pminsb128:
11492 case X86::BI__builtin_ia32_pminsw128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011493 case X86::BI__builtin_ia32_pminsd128:
Craig Topperf2043b02018-05-23 04:51:54 +000011494 case X86::BI__builtin_ia32_pminsq128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011495 case X86::BI__builtin_ia32_pminsb256:
11496 case X86::BI__builtin_ia32_pminsw256:
Craig Topper531ce282016-10-24 04:04:24 +000011497 case X86::BI__builtin_ia32_pminsd256:
Craig Topperf2043b02018-05-23 04:51:54 +000011498 case X86::BI__builtin_ia32_pminsq256:
11499 case X86::BI__builtin_ia32_pminsb512:
11500 case X86::BI__builtin_ia32_pminsw512:
11501 case X86::BI__builtin_ia32_pminsd512:
11502 case X86::BI__builtin_ia32_pminsq512:
Craig Topper531ce282016-10-24 04:04:24 +000011503 return EmitX86MinMax(*this, ICmpInst::ICMP_SLT, Ops);
Sanjay Patel7495ec02016-06-15 17:18:50 +000011504 case X86::BI__builtin_ia32_pminub128:
11505 case X86::BI__builtin_ia32_pminuw128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011506 case X86::BI__builtin_ia32_pminud128:
Craig Topperf2043b02018-05-23 04:51:54 +000011507 case X86::BI__builtin_ia32_pminuq128:
Sanjay Pateldbd68dd2016-06-16 18:45:01 +000011508 case X86::BI__builtin_ia32_pminub256:
11509 case X86::BI__builtin_ia32_pminuw256:
Craig Topper531ce282016-10-24 04:04:24 +000011510 case X86::BI__builtin_ia32_pminud256:
Craig Topperf2043b02018-05-23 04:51:54 +000011511 case X86::BI__builtin_ia32_pminuq256:
11512 case X86::BI__builtin_ia32_pminub512:
11513 case X86::BI__builtin_ia32_pminuw512:
11514 case X86::BI__builtin_ia32_pminud512:
11515 case X86::BI__builtin_ia32_pminuq512:
Craig Topper531ce282016-10-24 04:04:24 +000011516 return EmitX86MinMax(*this, ICmpInst::ICMP_ULT, Ops);
Sanjay Patel7495ec02016-06-15 17:18:50 +000011517
Craig Topper304edc12018-04-09 19:17:54 +000011518 case X86::BI__builtin_ia32_pmuludq128:
11519 case X86::BI__builtin_ia32_pmuludq256:
11520 case X86::BI__builtin_ia32_pmuludq512:
11521 return EmitX86Muldq(*this, /*IsSigned*/false, Ops);
11522
11523 case X86::BI__builtin_ia32_pmuldq128:
11524 case X86::BI__builtin_ia32_pmuldq256:
11525 case X86::BI__builtin_ia32_pmuldq512:
11526 return EmitX86Muldq(*this, /*IsSigned*/true, Ops);
11527
Craig Topper288bd2e2018-05-21 20:58:23 +000011528 case X86::BI__builtin_ia32_pternlogd512_mask:
11529 case X86::BI__builtin_ia32_pternlogq512_mask:
11530 case X86::BI__builtin_ia32_pternlogd128_mask:
11531 case X86::BI__builtin_ia32_pternlogd256_mask:
11532 case X86::BI__builtin_ia32_pternlogq128_mask:
11533 case X86::BI__builtin_ia32_pternlogq256_mask:
11534 return EmitX86Ternlog(*this, /*ZeroMask*/false, Ops);
11535
11536 case X86::BI__builtin_ia32_pternlogd512_maskz:
11537 case X86::BI__builtin_ia32_pternlogq512_maskz:
11538 case X86::BI__builtin_ia32_pternlogd128_maskz:
11539 case X86::BI__builtin_ia32_pternlogd256_maskz:
11540 case X86::BI__builtin_ia32_pternlogq128_maskz:
11541 case X86::BI__builtin_ia32_pternlogq256_maskz:
11542 return EmitX86Ternlog(*this, /*ZeroMask*/true, Ops);
11543
Craig Toppercd9e2322019-01-07 21:00:41 +000011544 case X86::BI__builtin_ia32_vpshldd128:
11545 case X86::BI__builtin_ia32_vpshldd256:
11546 case X86::BI__builtin_ia32_vpshldd512:
11547 case X86::BI__builtin_ia32_vpshldq128:
11548 case X86::BI__builtin_ia32_vpshldq256:
11549 case X86::BI__builtin_ia32_vpshldq512:
11550 case X86::BI__builtin_ia32_vpshldw128:
11551 case X86::BI__builtin_ia32_vpshldw256:
11552 case X86::BI__builtin_ia32_vpshldw512:
11553 return EmitX86FunnelShift(*this, Ops[0], Ops[1], Ops[2], false);
11554
11555 case X86::BI__builtin_ia32_vpshrdd128:
11556 case X86::BI__builtin_ia32_vpshrdd256:
11557 case X86::BI__builtin_ia32_vpshrdd512:
11558 case X86::BI__builtin_ia32_vpshrdq128:
11559 case X86::BI__builtin_ia32_vpshrdq256:
11560 case X86::BI__builtin_ia32_vpshrdq512:
11561 case X86::BI__builtin_ia32_vpshrdw128:
11562 case X86::BI__builtin_ia32_vpshrdw256:
11563 case X86::BI__builtin_ia32_vpshrdw512:
11564 // Ops 0 and 1 are swapped.
11565 return EmitX86FunnelShift(*this, Ops[1], Ops[0], Ops[2], true);
11566
11567 case X86::BI__builtin_ia32_vpshldvd128:
11568 case X86::BI__builtin_ia32_vpshldvd256:
11569 case X86::BI__builtin_ia32_vpshldvd512:
11570 case X86::BI__builtin_ia32_vpshldvq128:
11571 case X86::BI__builtin_ia32_vpshldvq256:
11572 case X86::BI__builtin_ia32_vpshldvq512:
11573 case X86::BI__builtin_ia32_vpshldvw128:
11574 case X86::BI__builtin_ia32_vpshldvw256:
11575 case X86::BI__builtin_ia32_vpshldvw512:
11576 return EmitX86FunnelShift(*this, Ops[0], Ops[1], Ops[2], false);
11577
11578 case X86::BI__builtin_ia32_vpshrdvd128:
11579 case X86::BI__builtin_ia32_vpshrdvd256:
11580 case X86::BI__builtin_ia32_vpshrdvd512:
11581 case X86::BI__builtin_ia32_vpshrdvq128:
11582 case X86::BI__builtin_ia32_vpshrdvq256:
11583 case X86::BI__builtin_ia32_vpshrdvq512:
11584 case X86::BI__builtin_ia32_vpshrdvw128:
11585 case X86::BI__builtin_ia32_vpshrdvw256:
11586 case X86::BI__builtin_ia32_vpshrdvw512:
11587 // Ops 0 and 1 are swapped.
11588 return EmitX86FunnelShift(*this, Ops[1], Ops[0], Ops[2], true);
11589
Michael J. Spencer6826eb82011-04-15 15:07:13 +000011590 // 3DNow!
Michael J. Spencer6826eb82011-04-15 15:07:13 +000011591 case X86::BI__builtin_ia32_pswapdsf:
11592 case X86::BI__builtin_ia32_pswapdsi: {
Chandler Carrutha2a54102012-02-20 07:35:45 +000011593 llvm::Type *MMXTy = llvm::Type::getX86_MMXTy(getLLVMContext());
11594 Ops[0] = Builder.CreateBitCast(Ops[0], MMXTy, "cast");
Craig Topperd2f814d2015-02-16 21:30:08 +000011595 llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_3dnowa_pswapd);
11596 return Builder.CreateCall(F, Ops, "pswapd");
Michael J. Spencer6826eb82011-04-15 15:07:13 +000011597 }
Benjamin Kramera43b6992012-07-12 09:33:03 +000011598 case X86::BI__builtin_ia32_rdrand16_step:
11599 case X86::BI__builtin_ia32_rdrand32_step:
Michael Liaoffaae352013-03-29 05:17:55 +000011600 case X86::BI__builtin_ia32_rdrand64_step:
11601 case X86::BI__builtin_ia32_rdseed16_step:
11602 case X86::BI__builtin_ia32_rdseed32_step:
11603 case X86::BI__builtin_ia32_rdseed64_step: {
Benjamin Kramera43b6992012-07-12 09:33:03 +000011604 Intrinsic::ID ID;
11605 switch (BuiltinID) {
11606 default: llvm_unreachable("Unsupported intrinsic!");
11607 case X86::BI__builtin_ia32_rdrand16_step:
11608 ID = Intrinsic::x86_rdrand_16;
11609 break;
11610 case X86::BI__builtin_ia32_rdrand32_step:
11611 ID = Intrinsic::x86_rdrand_32;
11612 break;
11613 case X86::BI__builtin_ia32_rdrand64_step:
11614 ID = Intrinsic::x86_rdrand_64;
11615 break;
Michael Liaoffaae352013-03-29 05:17:55 +000011616 case X86::BI__builtin_ia32_rdseed16_step:
11617 ID = Intrinsic::x86_rdseed_16;
11618 break;
11619 case X86::BI__builtin_ia32_rdseed32_step:
11620 ID = Intrinsic::x86_rdseed_32;
11621 break;
11622 case X86::BI__builtin_ia32_rdseed64_step:
11623 ID = Intrinsic::x86_rdseed_64;
11624 break;
Benjamin Kramera43b6992012-07-12 09:33:03 +000011625 }
11626
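// The *_step builtins store the random value through the pointer operand
// and return the carry flag as the success indicator.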
David Blaikie4ba525b2015-07-14 17:27:39 +000011627 Value *Call = Builder.CreateCall(CGM.getIntrinsic(ID));
John McCall7f416cc2015-09-08 08:05:57 +000011628 Builder.CreateDefaultAlignedStore(Builder.CreateExtractValue(Call, 0),
11629 Ops[0]);
Benjamin Kramera43b6992012-07-12 09:33:03 +000011630 return Builder.CreateExtractValue(Call, 1);
11631 }
Craig Topper52a61fc2018-09-07 16:58:57 +000011632 case X86::BI__builtin_ia32_addcarryx_u32:
11633 case X86::BI__builtin_ia32_addcarryx_u64:
Craig Topper52a61fc2018-09-07 16:58:57 +000011634 case X86::BI__builtin_ia32_subborrow_u32:
11635 case X86::BI__builtin_ia32_subborrow_u64: {
11636 Intrinsic::ID IID;
11637 switch (BuiltinID) {
11638 default: llvm_unreachable("Unsupported intrinsic!");
11639 case X86::BI__builtin_ia32_addcarryx_u32:
Craig Topper6d7a7ef2018-12-10 06:07:59 +000011640 IID = Intrinsic::x86_addcarry_32;
Craig Topper52a61fc2018-09-07 16:58:57 +000011641 break;
11642 case X86::BI__builtin_ia32_addcarryx_u64:
Craig Topper6d7a7ef2018-12-10 06:07:59 +000011643 IID = Intrinsic::x86_addcarry_64;
Craig Topper52a61fc2018-09-07 16:58:57 +000011644 break;
11645 case X86::BI__builtin_ia32_subborrow_u32:
Craig Topper6d7a7ef2018-12-10 06:07:59 +000011646 IID = Intrinsic::x86_subborrow_32;
Craig Topper52a61fc2018-09-07 16:58:57 +000011647 break;
11648 case X86::BI__builtin_ia32_subborrow_u64:
Craig Topper6d7a7ef2018-12-10 06:07:59 +000011649 IID = Intrinsic::x86_subborrow_64;
Craig Topper52a61fc2018-09-07 16:58:57 +000011650 break;
11651 }
11652
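// The intrinsic returns { carry-out, result }; store the result through
// the out pointer (Ops[3]) and return the carry-out.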
11653 Value *Call = Builder.CreateCall(CGM.getIntrinsic(IID),
11654 { Ops[0], Ops[1], Ops[2] });
11655 Builder.CreateDefaultAlignedStore(Builder.CreateExtractValue(Call, 1),
11656 Ops[3]);
11657 return Builder.CreateExtractValue(Call, 0);
11658 }
Sanjay Patel280cfd12016-06-15 21:20:04 +000011659
Craig Topper4ef61ae2018-06-26 00:44:02 +000011660 case X86::BI__builtin_ia32_fpclassps128_mask:
11661 case X86::BI__builtin_ia32_fpclassps256_mask:
11662 case X86::BI__builtin_ia32_fpclassps512_mask:
11663 case X86::BI__builtin_ia32_fpclasspd128_mask:
11664 case X86::BI__builtin_ia32_fpclasspd256_mask:
11665 case X86::BI__builtin_ia32_fpclasspd512_mask: {
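// Peel off the mask operand; it is applied to the classification result
// after the intrinsic call.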
11666 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
11667 Value *MaskIn = Ops[2];
11668 Ops.erase(&Ops[2]);
11669
11670 Intrinsic::ID ID;
11671 switch (BuiltinID) {
11672 default: llvm_unreachable("Unsupported intrinsic!");
11673 case X86::BI__builtin_ia32_fpclassps128_mask:
Craig Topper851f3632018-06-27 15:57:57 +000011674 ID = Intrinsic::x86_avx512_fpclass_ps_128;
Craig Topper4ef61ae2018-06-26 00:44:02 +000011675 break;
11676 case X86::BI__builtin_ia32_fpclassps256_mask:
Craig Topper851f3632018-06-27 15:57:57 +000011677 ID = Intrinsic::x86_avx512_fpclass_ps_256;
Craig Topper4ef61ae2018-06-26 00:44:02 +000011678 break;
11679 case X86::BI__builtin_ia32_fpclassps512_mask:
Craig Topper851f3632018-06-27 15:57:57 +000011680 ID = Intrinsic::x86_avx512_fpclass_ps_512;
Craig Topper4ef61ae2018-06-26 00:44:02 +000011681 break;
11682 case X86::BI__builtin_ia32_fpclasspd128_mask:
Craig Topper851f3632018-06-27 15:57:57 +000011683 ID = Intrinsic::x86_avx512_fpclass_pd_128;
Craig Topper4ef61ae2018-06-26 00:44:02 +000011684 break;
11685 case X86::BI__builtin_ia32_fpclasspd256_mask:
Craig Topper851f3632018-06-27 15:57:57 +000011686 ID = Intrinsic::x86_avx512_fpclass_pd_256;
Craig Topper4ef61ae2018-06-26 00:44:02 +000011687 break;
11688 case X86::BI__builtin_ia32_fpclasspd512_mask:
Craig Topper851f3632018-06-27 15:57:57 +000011689 ID = Intrinsic::x86_avx512_fpclass_pd_512;
Craig Topper4ef61ae2018-06-26 00:44:02 +000011690 break;
11691 }
11692
11693 Value *Fpclass = Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
11694 return EmitX86MaskedCompareResult(*this, Fpclass, NumElts, MaskIn);
11695 }
11696
Craig Topper49488402019-01-14 08:46:51 +000011697 case X86::BI__builtin_ia32_vpmultishiftqb128:
11698 case X86::BI__builtin_ia32_vpmultishiftqb256:
11699 case X86::BI__builtin_ia32_vpmultishiftqb512: {
11700 Intrinsic::ID ID;
11701 switch (BuiltinID) {
11702 default: llvm_unreachable("Unsupported intrinsic!");
11703 case X86::BI__builtin_ia32_vpmultishiftqb128:
11704 ID = Intrinsic::x86_avx512_pmultishift_qb_128;
11705 break;
11706 case X86::BI__builtin_ia32_vpmultishiftqb256:
11707 ID = Intrinsic::x86_avx512_pmultishift_qb_256;
11708 break;
11709 case X86::BI__builtin_ia32_vpmultishiftqb512:
11710 ID = Intrinsic::x86_avx512_pmultishift_qb_512;
11711 break;
11712 }
11713
11714 return Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
11715 }
11716
Craig Topper689b3b72019-01-14 00:03:55 +000011717 case X86::BI__builtin_ia32_vpshufbitqmb128_mask:
11718 case X86::BI__builtin_ia32_vpshufbitqmb256_mask:
11719 case X86::BI__builtin_ia32_vpshufbitqmb512_mask: {
11720 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
11721 Value *MaskIn = Ops[2];
11722 Ops.erase(&Ops[2]);
11723
11724 Intrinsic::ID ID;
11725 switch (BuiltinID) {
11726 default: llvm_unreachable("Unsupported intrinsic!");
11727 case X86::BI__builtin_ia32_vpshufbitqmb128_mask:
11728 ID = Intrinsic::x86_avx512_vpshufbitqmb_128;
11729 break;
11730 case X86::BI__builtin_ia32_vpshufbitqmb256_mask:
11731 ID = Intrinsic::x86_avx512_vpshufbitqmb_256;
11732 break;
11733 case X86::BI__builtin_ia32_vpshufbitqmb512_mask:
11734 ID = Intrinsic::x86_avx512_vpshufbitqmb_512;
11735 break;
11736 }
11737
Craig Topper49488402019-01-14 08:46:51 +000011738 Value *Shufbit = Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
11739 return EmitX86MaskedCompareResult(*this, Shufbit, NumElts, MaskIn);
Craig Topper689b3b72019-01-14 00:03:55 +000011740 }
11741
Gabor Buella716863c2018-06-22 11:59:16 +000011742 // packed comparison intrinsics
Craig Topper2094d8f2014-12-27 06:59:57 +000011743 case X86::BI__builtin_ia32_cmpeqps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011744 case X86::BI__builtin_ia32_cmpeqpd:
Craig Topper01600632016-07-08 01:57:24 +000011745 return getVectorFCmpIR(CmpInst::FCMP_OEQ);
Craig Topper925ef0a2016-07-08 01:48:44 +000011746 case X86::BI__builtin_ia32_cmpltps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011747 case X86::BI__builtin_ia32_cmpltpd:
Craig Topper01600632016-07-08 01:57:24 +000011748 return getVectorFCmpIR(CmpInst::FCMP_OLT);
Craig Topper925ef0a2016-07-08 01:48:44 +000011749 case X86::BI__builtin_ia32_cmpleps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011750 case X86::BI__builtin_ia32_cmplepd:
Craig Topper01600632016-07-08 01:57:24 +000011751 return getVectorFCmpIR(CmpInst::FCMP_OLE);
Craig Topper925ef0a2016-07-08 01:48:44 +000011752 case X86::BI__builtin_ia32_cmpunordps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011753 case X86::BI__builtin_ia32_cmpunordpd:
Craig Topper01600632016-07-08 01:57:24 +000011754 return getVectorFCmpIR(CmpInst::FCMP_UNO);
Craig Topper925ef0a2016-07-08 01:48:44 +000011755 case X86::BI__builtin_ia32_cmpneqps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011756 case X86::BI__builtin_ia32_cmpneqpd:
Craig Topper01600632016-07-08 01:57:24 +000011757 return getVectorFCmpIR(CmpInst::FCMP_UNE);
Craig Topper925ef0a2016-07-08 01:48:44 +000011758 case X86::BI__builtin_ia32_cmpnltps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011759 case X86::BI__builtin_ia32_cmpnltpd:
Craig Topper01600632016-07-08 01:57:24 +000011760 return getVectorFCmpIR(CmpInst::FCMP_UGE);
Craig Topper925ef0a2016-07-08 01:48:44 +000011761 case X86::BI__builtin_ia32_cmpnleps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011762 case X86::BI__builtin_ia32_cmpnlepd:
Craig Topper01600632016-07-08 01:57:24 +000011763 return getVectorFCmpIR(CmpInst::FCMP_UGT);
Craig Topper925ef0a2016-07-08 01:48:44 +000011764 case X86::BI__builtin_ia32_cmpordps:
Craig Topper2094d8f2014-12-27 06:59:57 +000011765 case X86::BI__builtin_ia32_cmpordpd:
Craig Topper01600632016-07-08 01:57:24 +000011766 return getVectorFCmpIR(CmpInst::FCMP_ORD);
Craig Topper425d02d2016-07-06 06:27:31 +000011767 case X86::BI__builtin_ia32_cmpps:
11768 case X86::BI__builtin_ia32_cmpps256:
11769 case X86::BI__builtin_ia32_cmppd:
Gabor Buella716863c2018-06-22 11:59:16 +000011770 case X86::BI__builtin_ia32_cmppd256:
11771 case X86::BI__builtin_ia32_cmpps128_mask:
11772 case X86::BI__builtin_ia32_cmpps256_mask:
11773 case X86::BI__builtin_ia32_cmpps512_mask:
11774 case X86::BI__builtin_ia32_cmppd128_mask:
11775 case X86::BI__builtin_ia32_cmppd256_mask:
11776 case X86::BI__builtin_ia32_cmppd512_mask: {
 11777 // Lower vector comparisons to fcmp instructions, while ignoring the
 11778 // requested signalling behaviour and
 11779 // the requested rounding mode.
 11780 // This is only possible as long as FENV_ACCESS is not implemented.
 11781 // See also: https://reviews.llvm.org/D45616
 11782 
 11783 // The third argument is the comparison condition, an integer in the
 11784 // range [0, 31].
Craig Topper342b0952018-06-21 23:39:47 +000011785 unsigned CC = cast<llvm::ConstantInt>(Ops[2])->getZExtValue() & 0x1f;
Gabor Buella716863c2018-06-22 11:59:16 +000011786
11787 // Lowering to IR fcmp instruction.
11788 // Ignoring requested signaling behaviour,
11789 // e.g. both _CMP_GT_OS & _CMP_GT_OQ are translated to FCMP_OGT.
11790 FCmpInst::Predicate Pred;
11791 switch (CC) {
Gabor Buella9679eb62018-07-05 14:26:56 +000011792 case 0x00: Pred = FCmpInst::FCMP_OEQ; break;
11793 case 0x01: Pred = FCmpInst::FCMP_OLT; break;
11794 case 0x02: Pred = FCmpInst::FCMP_OLE; break;
11795 case 0x03: Pred = FCmpInst::FCMP_UNO; break;
11796 case 0x04: Pred = FCmpInst::FCMP_UNE; break;
11797 case 0x05: Pred = FCmpInst::FCMP_UGE; break;
11798 case 0x06: Pred = FCmpInst::FCMP_UGT; break;
11799 case 0x07: Pred = FCmpInst::FCMP_ORD; break;
11800 case 0x08: Pred = FCmpInst::FCMP_UEQ; break;
11801 case 0x09: Pred = FCmpInst::FCMP_ULT; break;
11802 case 0x0a: Pred = FCmpInst::FCMP_ULE; break;
11803 case 0x0b: Pred = FCmpInst::FCMP_FALSE; break;
11804 case 0x0c: Pred = FCmpInst::FCMP_ONE; break;
11805 case 0x0d: Pred = FCmpInst::FCMP_OGE; break;
11806 case 0x0e: Pred = FCmpInst::FCMP_OGT; break;
11807 case 0x0f: Pred = FCmpInst::FCMP_TRUE; break;
11808 case 0x10: Pred = FCmpInst::FCMP_OEQ; break;
11809 case 0x11: Pred = FCmpInst::FCMP_OLT; break;
11810 case 0x12: Pred = FCmpInst::FCMP_OLE; break;
11811 case 0x13: Pred = FCmpInst::FCMP_UNO; break;
11812 case 0x14: Pred = FCmpInst::FCMP_UNE; break;
11813 case 0x15: Pred = FCmpInst::FCMP_UGE; break;
11814 case 0x16: Pred = FCmpInst::FCMP_UGT; break;
11815 case 0x17: Pred = FCmpInst::FCMP_ORD; break;
11816 case 0x18: Pred = FCmpInst::FCMP_UEQ; break;
11817 case 0x19: Pred = FCmpInst::FCMP_ULT; break;
11818 case 0x1a: Pred = FCmpInst::FCMP_ULE; break;
11819 case 0x1b: Pred = FCmpInst::FCMP_FALSE; break;
11820 case 0x1c: Pred = FCmpInst::FCMP_ONE; break;
11821 case 0x1d: Pred = FCmpInst::FCMP_OGE; break;
11822 case 0x1e: Pred = FCmpInst::FCMP_OGT; break;
11823 case 0x1f: Pred = FCmpInst::FCMP_TRUE; break;
Gabor Buella716863c2018-06-22 11:59:16 +000011824 default: llvm_unreachable("Unhandled CC");
Craig Topper425d02d2016-07-06 06:27:31 +000011825 }
11826
Gabor Buella716863c2018-06-22 11:59:16 +000011827 // Builtins without the _mask suffix return a vector of integers
11828 // of the same width as the input vectors
Craig Topper425d02d2016-07-06 06:27:31 +000011829 switch (BuiltinID) {
Gabor Buella716863c2018-06-22 11:59:16 +000011830 case X86::BI__builtin_ia32_cmpps512_mask:
11831 case X86::BI__builtin_ia32_cmppd512_mask:
11832 case X86::BI__builtin_ia32_cmpps128_mask:
11833 case X86::BI__builtin_ia32_cmpps256_mask:
11834 case X86::BI__builtin_ia32_cmppd128_mask:
11835 case X86::BI__builtin_ia32_cmppd256_mask: {
11836 unsigned NumElts = Ops[0]->getType()->getVectorNumElements();
11837 Value *Cmp = Builder.CreateFCmp(Pred, Ops[0], Ops[1]);
11838 return EmitX86MaskedCompareResult(*this, Cmp, NumElts, Ops[3]);
Craig Topper425d02d2016-07-06 06:27:31 +000011839 }
Gabor Buella716863c2018-06-22 11:59:16 +000011840 default:
Fangrui Song6907ce22018-07-30 19:24:48 +000011841 return getVectorFCmpIR(Pred);
Gabor Buella716863c2018-06-22 11:59:16 +000011842 }
Craig Topper425d02d2016-07-06 06:27:31 +000011843 }
Sanjay Patel280cfd12016-06-15 21:20:04 +000011844
11845 // SSE scalar comparison intrinsics
11846 case X86::BI__builtin_ia32_cmpeqss:
11847 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 0);
11848 case X86::BI__builtin_ia32_cmpltss:
11849 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 1);
11850 case X86::BI__builtin_ia32_cmpless:
11851 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 2);
11852 case X86::BI__builtin_ia32_cmpunordss:
11853 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 3);
11854 case X86::BI__builtin_ia32_cmpneqss:
11855 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 4);
11856 case X86::BI__builtin_ia32_cmpnltss:
11857 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 5);
11858 case X86::BI__builtin_ia32_cmpnless:
11859 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 6);
11860 case X86::BI__builtin_ia32_cmpordss:
11861 return getCmpIntrinsicCall(Intrinsic::x86_sse_cmp_ss, 7);
Craig Topper2094d8f2014-12-27 06:59:57 +000011862 case X86::BI__builtin_ia32_cmpeqsd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011863 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 0);
Craig Topper2094d8f2014-12-27 06:59:57 +000011864 case X86::BI__builtin_ia32_cmpltsd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011865 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 1);
Craig Topper2094d8f2014-12-27 06:59:57 +000011866 case X86::BI__builtin_ia32_cmplesd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011867 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 2);
Craig Topper2094d8f2014-12-27 06:59:57 +000011868 case X86::BI__builtin_ia32_cmpunordsd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011869 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 3);
Craig Topper2094d8f2014-12-27 06:59:57 +000011870 case X86::BI__builtin_ia32_cmpneqsd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011871 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 4);
Craig Topper2094d8f2014-12-27 06:59:57 +000011872 case X86::BI__builtin_ia32_cmpnltsd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011873 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 5);
Craig Topper2094d8f2014-12-27 06:59:57 +000011874 case X86::BI__builtin_ia32_cmpnlesd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011875 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 6);
Craig Topper2094d8f2014-12-27 06:59:57 +000011876 case X86::BI__builtin_ia32_cmpordsd:
Sanjay Patel280cfd12016-06-15 21:20:04 +000011877 return getCmpIntrinsicCall(Intrinsic::x86_sse2_cmp_sd, 7);
Albert Gutowskif3a0bce2016-10-04 22:29:49 +000011878
Luo, Yuanke844f6622019-05-06 08:25:11 +000011879 // AVX512 bf16 intrinsics
11880 case X86::BI__builtin_ia32_cvtneps2bf16_128_mask: {
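// The 128-bit form takes its mask as an integer; convert it to a vXi1
// vector for the masked intrinsic.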
11881 Ops[2] = getMaskVecValue(*this, Ops[2],
11882 Ops[0]->getType()->getVectorNumElements());
11883 Intrinsic::ID IID = Intrinsic::x86_avx512bf16_mask_cvtneps2bf16_128;
11884 return Builder.CreateCall(CGM.getIntrinsic(IID), Ops);
11885 }
11886
Craig Topper20040db2019-05-16 18:28:17 +000011887 case X86::BI__builtin_ia32_cvtneps2bf16_256_mask:
11888 case X86::BI__builtin_ia32_cvtneps2bf16_512_mask: {
11889 Intrinsic::ID IID;
11890 switch (BuiltinID) {
11891 default: llvm_unreachable("Unsupported intrinsic!");
11892 case X86::BI__builtin_ia32_cvtneps2bf16_256_mask:
11893 IID = Intrinsic::x86_avx512bf16_cvtneps2bf16_256;
11894 break;
11895 case X86::BI__builtin_ia32_cvtneps2bf16_512_mask:
11896 IID = Intrinsic::x86_avx512bf16_cvtneps2bf16_512;
11897 break;
11898 }
11899 Value *Res = Builder.CreateCall(CGM.getIntrinsic(IID), Ops[0]);
11900 return EmitX86Select(*this, Ops[2], Res, Ops[1]);
11901 }
11902
Albert Gutowski7216f172016-10-10 18:09:27 +000011903 case X86::BI__emul:
11904 case X86::BI__emulu: {
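// Widen both 32-bit operands to 64 bits (sign- or zero-extending as
// appropriate) and multiply; the widened product cannot overflow, so the
// nsw/nuw flags are set accordingly.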
11905 llvm::Type *Int64Ty = llvm::IntegerType::get(getLLVMContext(), 64);
11906 bool isSigned = (BuiltinID == X86::BI__emul);
11907 Value *LHS = Builder.CreateIntCast(Ops[0], Int64Ty, isSigned);
11908 Value *RHS = Builder.CreateIntCast(Ops[1], Int64Ty, isSigned);
11909 return Builder.CreateMul(LHS, RHS, "", !isSigned, isSigned);
11910 }
Albert Gutowskif3a0bce2016-10-04 22:29:49 +000011911 case X86::BI__mulh:
Albert Gutowski7216f172016-10-10 18:09:27 +000011912 case X86::BI__umulh:
11913 case X86::BI_mul128:
11914 case X86::BI_umul128: {
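// Multiply in 128 bits. __mulh/__umulh return the high 64 bits;
// _mul128/_umul128 return the low 64 bits and store the high half through
// the third argument.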
Albert Gutowskif3a0bce2016-10-04 22:29:49 +000011915 llvm::Type *ResType = ConvertType(E->getType());
11916 llvm::Type *Int128Ty = llvm::IntegerType::get(getLLVMContext(), 128);
11917
Albert Gutowski7216f172016-10-10 18:09:27 +000011918 bool IsSigned = (BuiltinID == X86::BI__mulh || BuiltinID == X86::BI_mul128);
11919 Value *LHS = Builder.CreateIntCast(Ops[0], Int128Ty, IsSigned);
11920 Value *RHS = Builder.CreateIntCast(Ops[1], Int128Ty, IsSigned);
Albert Gutowskif3a0bce2016-10-04 22:29:49 +000011921
11922 Value *MulResult, *HigherBits;
11923 if (IsSigned) {
11924 MulResult = Builder.CreateNSWMul(LHS, RHS);
11925 HigherBits = Builder.CreateAShr(MulResult, 64);
11926 } else {
11927 MulResult = Builder.CreateNUWMul(LHS, RHS);
11928 HigherBits = Builder.CreateLShr(MulResult, 64);
11929 }
Albert Gutowskif3a0bce2016-10-04 22:29:49 +000011930 HigherBits = Builder.CreateIntCast(HigherBits, ResType, IsSigned);
Albert Gutowski7216f172016-10-10 18:09:27 +000011931
11932 if (BuiltinID == X86::BI__mulh || BuiltinID == X86::BI__umulh)
11933 return HigherBits;
11934
11935 Address HighBitsAddress = EmitPointerWithAlignment(E->getArg(2));
11936 Builder.CreateStore(HigherBits, HighBitsAddress);
11937 return Builder.CreateIntCast(MulResult, ResType, IsSigned);
Albert Gutowskif3a0bce2016-10-04 22:29:49 +000011938 }
Albert Gutowskifcea61c2016-10-10 19:40:51 +000011939
11940 case X86::BI__faststorefence: {
11941 return Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent,
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +000011942 llvm::SyncScope::System);
Albert Gutowskifcea61c2016-10-10 19:40:51 +000011943 }
Nico Weberb2c53d32018-08-17 17:19:06 +000011944 case X86::BI__shiftleft128:
11945 case X86::BI__shiftright128: {
11946 // FIXME: Once fshl/fshr no longer add an unneeded and and cmov, do this:
11947 // llvm::Function *F = CGM.getIntrinsic(
11948 // BuiltinID == X86::BI__shiftleft128 ? Intrinsic::fshl : Intrinsic::fshr,
11949 // Int64Ty);
11950 // Ops[2] = Builder.CreateZExt(Ops[2], Int64Ty);
11951 // return Builder.CreateCall(F, Ops);
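// For now, concatenate the two 64-bit halves into a 128-bit value, shift
// by Amt (masked to 6 bits, matching the instruction), and return the
// relevant 64-bit half.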
11952 llvm::Type *Int128Ty = Builder.getInt128Ty();
Nico Weberacf81a72019-02-13 19:04:26 +000011953 Value *HighPart128 =
11954 Builder.CreateShl(Builder.CreateZExt(Ops[1], Int128Ty), 64);
11955 Value *LowPart128 = Builder.CreateZExt(Ops[0], Int128Ty);
11956 Value *Val = Builder.CreateOr(HighPart128, LowPart128);
Nico Weberb2c53d32018-08-17 17:19:06 +000011957 Value *Amt = Builder.CreateAnd(Builder.CreateZExt(Ops[2], Int128Ty),
11958 llvm::ConstantInt::get(Int128Ty, 0x3f));
11959 Value *Res;
11960 if (BuiltinID == X86::BI__shiftleft128)
11961 Res = Builder.CreateLShr(Builder.CreateShl(Val, Amt), 64);
11962 else
11963 Res = Builder.CreateLShr(Val, Amt);
11964 return Builder.CreateTrunc(Res, Int64Ty);
11965 }
Albert Gutowskifcea61c2016-10-10 19:40:51 +000011966 case X86::BI_ReadWriteBarrier:
11967 case X86::BI_ReadBarrier:
11968 case X86::BI_WriteBarrier: {
11969 return Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent,
Konstantin Zhuravlyovb0beb302017-07-11 22:23:37 +000011970 llvm::SyncScope::SingleThread);
Albert Gutowskifcea61c2016-10-10 19:40:51 +000011971 }
Albert Gutowski2a0621e2016-10-12 22:01:05 +000011972 case X86::BI_BitScanForward:
11973 case X86::BI_BitScanForward64:
11974 return EmitMSVCBuiltinExpr(MSVCIntrin::_BitScanForward, E);
11975 case X86::BI_BitScanReverse:
11976 case X86::BI_BitScanReverse64:
11977 return EmitMSVCBuiltinExpr(MSVCIntrin::_BitScanReverse, E);
Albert Gutowski5e08df02016-10-13 22:35:07 +000011978
11979 case X86::BI_InterlockedAnd64:
11980 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd, E);
11981 case X86::BI_InterlockedExchange64:
11982 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange, E);
11983 case X86::BI_InterlockedExchangeAdd64:
11984 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd, E);
11985 case X86::BI_InterlockedExchangeSub64:
11986 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeSub, E);
11987 case X86::BI_InterlockedOr64:
11988 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr, E);
11989 case X86::BI_InterlockedXor64:
11990 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor, E);
11991 case X86::BI_InterlockedDecrement64:
11992 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement, E);
11993 case X86::BI_InterlockedIncrement64:
11994 return EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement, E);
Reid Kleckner627f45f2017-12-14 19:00:21 +000011995 case X86::BI_InterlockedCompareExchange128: {
 11996 // InterlockedCompareExchange128 doesn't directly refer to 128bit ints;
 11997 // instead it takes pointers to 64bit ints for Destination and
 11998 // ComparandResult, and the exchange value is passed as two 64bit ints (high & low).
11999 // The previous value is written to ComparandResult, and success is
12000 // returned.
12001
12002 llvm::Type *Int128Ty = Builder.getInt128Ty();
12003 llvm::Type *Int128PtrTy = Int128Ty->getPointerTo();
12004
12005 Value *Destination =
Nico Weber14a577b2018-08-21 22:19:55 +000012006 Builder.CreateBitCast(Ops[0], Int128PtrTy);
12007 Value *ExchangeHigh128 = Builder.CreateZExt(Ops[1], Int128Ty);
12008 Value *ExchangeLow128 = Builder.CreateZExt(Ops[2], Int128Ty);
12009 Address ComparandResult(Builder.CreateBitCast(Ops[3], Int128PtrTy),
12010 getContext().toCharUnitsFromBits(128));
Reid Kleckner627f45f2017-12-14 19:00:21 +000012011
12012 Value *Exchange = Builder.CreateOr(
12013 Builder.CreateShl(ExchangeHigh128, 64, "", false, false),
12014 ExchangeLow128);
12015
12016 Value *Comparand = Builder.CreateLoad(ComparandResult);
12017
12018 AtomicCmpXchgInst *CXI =
12019 Builder.CreateAtomicCmpXchg(Destination, Comparand, Exchange,
12020 AtomicOrdering::SequentiallyConsistent,
12021 AtomicOrdering::SequentiallyConsistent);
12022 CXI->setVolatile(true);
12023
12024 // Write the result back to the inout pointer.
12025 Builder.CreateStore(Builder.CreateExtractValue(CXI, 0), ComparandResult);
12026
12027 // Get the success boolean and zero extend it to i8.
12028 Value *Success = Builder.CreateExtractValue(CXI, 1);
12029 return Builder.CreateZExt(Success, ConvertType(E->getType()));
12030 }
Albert Gutowski5e08df02016-10-13 22:35:07 +000012031
Albert Gutowski397d81b2016-10-13 16:03:42 +000012032 case X86::BI_AddressOfReturnAddress: {
James Y Knight8799cae2019-02-03 21:53:49 +000012033 Function *F = CGM.getIntrinsic(Intrinsic::addressofreturnaddress);
Albert Gutowski397d81b2016-10-13 16:03:42 +000012034 return Builder.CreateCall(F);
12035 }
Albert Gutowski1deab382016-10-14 17:33:05 +000012036 case X86::BI__stosb: {
 12037 // We treat __stosb as a volatile memset - it may not generate a "rep stosb"
 12038 // instruction, but it will create a memset that won't be optimized away.
12039 return Builder.CreateMemSet(Ops[0], Ops[1], Ops[2], 1, true);
12040 }
Reid Klecknerb04cb9a2017-03-06 19:43:16 +000012041 case X86::BI__ud2:
12042 // llvm.trap makes a ud2a instruction on x86.
12043 return EmitTrapCall(Intrinsic::trap);
12044 case X86::BI__int2c: {
12045 // This syscall signals a driver assertion failure in x86 NT kernels.
12046 llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, false);
12047 llvm::InlineAsm *IA =
12048 llvm::InlineAsm::get(FTy, "int $$0x2c", "", /*SideEffects=*/true);
Reid Klecknerde864822017-03-21 16:57:30 +000012049 llvm::AttributeList NoReturnAttr = llvm::AttributeList::get(
12050 getLLVMContext(), llvm::AttributeList::FunctionIndex,
12051 llvm::Attribute::NoReturn);
James Y Knight3933add2019-01-30 02:54:28 +000012052 llvm::CallInst *CI = Builder.CreateCall(IA);
12053 CI->setAttributes(NoReturnAttr);
12054 return CI;
Reid Klecknerb04cb9a2017-03-06 19:43:16 +000012055 }
Hans Wennborg043f4022017-03-22 19:13:13 +000012056 case X86::BI__readfsbyte:
12057 case X86::BI__readfsword:
12058 case X86::BI__readfsdword:
12059 case X86::BI__readfsqword: {
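// FS-relative loads are modeled with x86 address space 257.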
12060 llvm::Type *IntTy = ConvertType(E->getType());
Nico Weber14a577b2018-08-21 22:19:55 +000012061 Value *Ptr =
12062 Builder.CreateIntToPtr(Ops[0], llvm::PointerType::get(IntTy, 257));
Hans Wennborg043f4022017-03-22 19:13:13 +000012063 LoadInst *Load = Builder.CreateAlignedLoad(
12064 IntTy, Ptr, getContext().getTypeAlignInChars(E->getType()));
12065 Load->setVolatile(true);
12066 return Load;
12067 }
12068 case X86::BI__readgsbyte:
12069 case X86::BI__readgsword:
12070 case X86::BI__readgsdword:
12071 case X86::BI__readgsqword: {
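// GS-relative loads are modeled with x86 address space 256.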
12072 llvm::Type *IntTy = ConvertType(E->getType());
Nico Weber14a577b2018-08-21 22:19:55 +000012073 Value *Ptr =
12074 Builder.CreateIntToPtr(Ops[0], llvm::PointerType::get(IntTy, 256));
Hans Wennborg043f4022017-03-22 19:13:13 +000012075 LoadInst *Load = Builder.CreateAlignedLoad(
12076 IntTy, Ptr, getContext().getTypeAlignInChars(E->getType()));
12077 Load->setVolatile(true);
12078 return Load;
12079 }
Simon Pilgrim313dc852018-12-20 11:53:45 +000012080 case X86::BI__builtin_ia32_paddsb512:
12081 case X86::BI__builtin_ia32_paddsw512:
12082 case X86::BI__builtin_ia32_paddsb256:
12083 case X86::BI__builtin_ia32_paddsw256:
12084 case X86::BI__builtin_ia32_paddsb128:
12085 case X86::BI__builtin_ia32_paddsw128:
12086 return EmitX86AddSubSatExpr(*this, Ops, true, true);
Craig Topper72a76062018-08-16 07:28:06 +000012087 case X86::BI__builtin_ia32_paddusb512:
12088 case X86::BI__builtin_ia32_paddusw512:
Tomasz Krupae8cf9722018-08-14 08:01:38 +000012089 case X86::BI__builtin_ia32_paddusb256:
12090 case X86::BI__builtin_ia32_paddusw256:
12091 case X86::BI__builtin_ia32_paddusb128:
12092 case X86::BI__builtin_ia32_paddusw128:
Simon Pilgrim313dc852018-12-20 11:53:45 +000012093 return EmitX86AddSubSatExpr(*this, Ops, false, true);
12094 case X86::BI__builtin_ia32_psubsb512:
12095 case X86::BI__builtin_ia32_psubsw512:
12096 case X86::BI__builtin_ia32_psubsb256:
12097 case X86::BI__builtin_ia32_psubsw256:
12098 case X86::BI__builtin_ia32_psubsb128:
12099 case X86::BI__builtin_ia32_psubsw128:
12100 return EmitX86AddSubSatExpr(*this, Ops, true, false);
Craig Topper72a76062018-08-16 07:28:06 +000012101 case X86::BI__builtin_ia32_psubusb512:
12102 case X86::BI__builtin_ia32_psubusw512:
Tomasz Krupae8cf9722018-08-14 08:01:38 +000012103 case X86::BI__builtin_ia32_psubusb256:
12104 case X86::BI__builtin_ia32_psubusw256:
12105 case X86::BI__builtin_ia32_psubusb128:
12106 case X86::BI__builtin_ia32_psubusw128:
Simon Pilgrim313dc852018-12-20 11:53:45 +000012107 return EmitX86AddSubSatExpr(*this, Ops, false, false);
Anders Carlsson895af082007-12-09 23:17:02 +000012108 }
12109}
12110
Mike Stump11289f42009-09-09 15:08:12 +000012111Value *CodeGenFunction::EmitPPCBuiltinExpr(unsigned BuiltinID,
Chris Lattner13653d72007-12-13 07:34:23 +000012112 const CallExpr *E) {
Chris Lattner0e62c1c2011-07-23 10:55:15 +000012113 SmallVector<Value*, 4> Ops;
Chris Lattnerdad40622010-04-14 03:54:58 +000012114
12115 for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
12116 Ops.push_back(EmitScalarExpr(E->getArg(i)));
12117
12118 Intrinsic::ID ID = Intrinsic::not_intrinsic;
12119
12120 switch (BuiltinID) {
Craig Topper8a13c412014-05-21 05:09:00 +000012121 default: return nullptr;
Chris Lattnerdad40622010-04-14 03:54:58 +000012122
Hal Finkel65e1e4d2015-08-31 23:55:19 +000012123 // __builtin_ppc_get_timebase is GCC 4.8+'s PowerPC-specific name for what we
12124 // call __builtin_readcyclecounter.
12125 case PPC::BI__builtin_ppc_get_timebase:
12126 return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::readcyclecounter));
12127
Tony Jiang6a49aad2016-11-15 14:30:56 +000012128 // vec_ld, vec_xl_be, vec_lvsl, vec_lvsr
Anton Korobeynikovcc50b7d2010-06-19 09:47:18 +000012129 case PPC::BI__builtin_altivec_lvx:
12130 case PPC::BI__builtin_altivec_lvxl:
12131 case PPC::BI__builtin_altivec_lvebx:
12132 case PPC::BI__builtin_altivec_lvehx:
12133 case PPC::BI__builtin_altivec_lvewx:
12134 case PPC::BI__builtin_altivec_lvsl:
12135 case PPC::BI__builtin_altivec_lvsr:
Bill Schmidt9ec8cea2014-11-12 04:19:56 +000012136 case PPC::BI__builtin_vsx_lxvd2x:
12137 case PPC::BI__builtin_vsx_lxvw4x:
Tony Jiang6a49aad2016-11-15 14:30:56 +000012138 case PPC::BI__builtin_vsx_lxvd2x_be:
12139 case PPC::BI__builtin_vsx_lxvw4x_be:
Zaara Syedac1d29522016-11-15 18:04:13 +000012140 case PPC::BI__builtin_vsx_lxvl:
12141 case PPC::BI__builtin_vsx_lxvll:
Anton Korobeynikovcc50b7d2010-06-19 09:47:18 +000012142 {
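// Most of these builtins fold the offset operand into the address with a
// GEP; lxvl/lxvll instead pass the pointer and length straight through.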
Zaara Syedac1d29522016-11-15 18:04:13 +000012143 if (BuiltinID == PPC::BI__builtin_vsx_lxvl ||
 12144 BuiltinID == PPC::BI__builtin_vsx_lxvll) {
 12145 Ops[0] = Builder.CreateBitCast(Ops[0], Int8PtrTy);
 12146 } else {
12147 Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy);
12148 Ops[0] = Builder.CreateGEP(Ops[1], Ops[0]);
12149 Ops.pop_back();
12150 }
Anton Korobeynikovcc50b7d2010-06-19 09:47:18 +000012151
12152 switch (BuiltinID) {
David Blaikie83d382b2011-09-23 05:06:16 +000012153 default: llvm_unreachable("Unsupported ld/lvsl/lvsr intrinsic!");
Anton Korobeynikovcc50b7d2010-06-19 09:47:18 +000012154 case PPC::BI__builtin_altivec_lvx:
12155 ID = Intrinsic::ppc_altivec_lvx;
12156 break;
12157 case PPC::BI__builtin_altivec_lvxl:
12158 ID = Intrinsic::ppc_altivec_lvxl;
12159 break;
12160 case PPC::BI__builtin_altivec_lvebx:
12161 ID = Intrinsic::ppc_altivec_lvebx;
12162 break;
12163 case PPC::BI__builtin_altivec_lvehx:
12164 ID = Intrinsic::ppc_altivec_lvehx;
12165 break;
12166 case PPC::BI__builtin_altivec_lvewx:
12167 ID = Intrinsic::ppc_altivec_lvewx;
12168 break;
12169 case PPC::BI__builtin_altivec_lvsl:
12170 ID = Intrinsic::ppc_altivec_lvsl;
12171 break;
12172 case PPC::BI__builtin_altivec_lvsr:
12173 ID = Intrinsic::ppc_altivec_lvsr;
12174 break;
Bill Schmidt9ec8cea2014-11-12 04:19:56 +000012175 case PPC::BI__builtin_vsx_lxvd2x:
12176 ID = Intrinsic::ppc_vsx_lxvd2x;
12177 break;
12178 case PPC::BI__builtin_vsx_lxvw4x:
12179 ID = Intrinsic::ppc_vsx_lxvw4x;
12180 break;
Tony Jiang6a49aad2016-11-15 14:30:56 +000012181 case PPC::BI__builtin_vsx_lxvd2x_be:
12182 ID = Intrinsic::ppc_vsx_lxvd2x_be;
12183 break;
12184 case PPC::BI__builtin_vsx_lxvw4x_be:
12185 ID = Intrinsic::ppc_vsx_lxvw4x_be;
12186 break;
Zaara Syedac1d29522016-11-15 18:04:13 +000012187 case PPC::BI__builtin_vsx_lxvl:
12188 ID = Intrinsic::ppc_vsx_lxvl;
12189 break;
12190 case PPC::BI__builtin_vsx_lxvll:
12191 ID = Intrinsic::ppc_vsx_lxvll;
12192 break;
Anton Korobeynikovcc50b7d2010-06-19 09:47:18 +000012193 }
12194 llvm::Function *F = CGM.getIntrinsic(ID);
Jay Foad5bd375a2011-07-15 08:37:34 +000012195 return Builder.CreateCall(F, Ops, "");
Anton Korobeynikovcc50b7d2010-06-19 09:47:18 +000012196 }
12197
Tony Jiang6a49aad2016-11-15 14:30:56 +000012198 // vec_st, vec_xst_be
Chris Lattnerdad40622010-04-14 03:54:58 +000012199 case PPC::BI__builtin_altivec_stvx:
12200 case PPC::BI__builtin_altivec_stvxl:
12201 case PPC::BI__builtin_altivec_stvebx:
12202 case PPC::BI__builtin_altivec_stvehx:
12203 case PPC::BI__builtin_altivec_stvewx:
Bill Schmidt9ec8cea2014-11-12 04:19:56 +000012204 case PPC::BI__builtin_vsx_stxvd2x:
12205 case PPC::BI__builtin_vsx_stxvw4x:
Tony Jiang6a49aad2016-11-15 14:30:56 +000012206 case PPC::BI__builtin_vsx_stxvd2x_be:
12207 case PPC::BI__builtin_vsx_stxvw4x_be:
Zaara Syedac1d29522016-11-15 18:04:13 +000012208 case PPC::BI__builtin_vsx_stxvl:
12209 case PPC::BI__builtin_vsx_stxvll:
Chris Lattnerdad40622010-04-14 03:54:58 +000012210 {
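// As with the loads, fold the offset into the address, except for
// stxvl/stxvll which pass the pointer and length directly.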
Zaara Syedac1d29522016-11-15 18:04:13 +000012211 if (BuiltinID == PPC::BI__builtin_vsx_stxvl ||
 12212 BuiltinID == PPC::BI__builtin_vsx_stxvll) {
 12213 Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy);
 12214 } else {
12215 Ops[2] = Builder.CreateBitCast(Ops[2], Int8PtrTy);
12216 Ops[1] = Builder.CreateGEP(Ops[2], Ops[1]);
12217 Ops.pop_back();
12218 }
Chris Lattnerdad40622010-04-14 03:54:58 +000012219
12220 switch (BuiltinID) {
David Blaikie83d382b2011-09-23 05:06:16 +000012221 default: llvm_unreachable("Unsupported st intrinsic!");
Chris Lattnerdad40622010-04-14 03:54:58 +000012222 case PPC::BI__builtin_altivec_stvx:
12223 ID = Intrinsic::ppc_altivec_stvx;
12224 break;
12225 case PPC::BI__builtin_altivec_stvxl:
12226 ID = Intrinsic::ppc_altivec_stvxl;
12227 break;
12228 case PPC::BI__builtin_altivec_stvebx:
12229 ID = Intrinsic::ppc_altivec_stvebx;
12230 break;
12231 case PPC::BI__builtin_altivec_stvehx:
12232 ID = Intrinsic::ppc_altivec_stvehx;
12233 break;
12234 case PPC::BI__builtin_altivec_stvewx:
12235 ID = Intrinsic::ppc_altivec_stvewx;
12236 break;
Bill Schmidt9ec8cea2014-11-12 04:19:56 +000012237 case PPC::BI__builtin_vsx_stxvd2x:
12238 ID = Intrinsic::ppc_vsx_stxvd2x;
12239 break;
12240 case PPC::BI__builtin_vsx_stxvw4x:
12241 ID = Intrinsic::ppc_vsx_stxvw4x;
12242 break;
Tony Jiang6a49aad2016-11-15 14:30:56 +000012243 case PPC::BI__builtin_vsx_stxvd2x_be:
12244 ID = Intrinsic::ppc_vsx_stxvd2x_be;
12245 break;
12246 case PPC::BI__builtin_vsx_stxvw4x_be:
12247 ID = Intrinsic::ppc_vsx_stxvw4x_be;
12248 break;
Zaara Syedac1d29522016-11-15 18:04:13 +000012249 case PPC::BI__builtin_vsx_stxvl:
12250 ID = Intrinsic::ppc_vsx_stxvl;
12251 break;
12252 case PPC::BI__builtin_vsx_stxvll:
12253 ID = Intrinsic::ppc_vsx_stxvll;
12254 break;
Chris Lattnerdad40622010-04-14 03:54:58 +000012255 }
12256 llvm::Function *F = CGM.getIntrinsic(ID);
Jay Foad5bd375a2011-07-15 08:37:34 +000012257 return Builder.CreateCall(F, Ops, "");
Chris Lattnerdad40622010-04-14 03:54:58 +000012258 }
Nemanja Ivanovic1c7ad712015-07-05 06:40:52 +000012259 // Square root
12260 case PPC::BI__builtin_vsx_xvsqrtsp:
12261 case PPC::BI__builtin_vsx_xvsqrtdp: {
Nemanja Ivanovic2f1f9262015-06-26 19:27:20 +000012262 llvm::Type *ResultType = ConvertType(E->getType());
12263 Value *X = EmitScalarExpr(E->getArg(0));
Nemanja Ivanovic1c7ad712015-07-05 06:40:52 +000012264 ID = Intrinsic::sqrt;
Nemanja Ivanovic2f1f9262015-06-26 19:27:20 +000012265 llvm::Function *F = CGM.getIntrinsic(ID, ResultType);
12266 return Builder.CreateCall(F, X);
Chris Lattnerdad40622010-04-14 03:54:58 +000012267 }
Nemanja Ivanovic6c363ed2015-07-14 17:50:27 +000012268 // Count leading zeros
12269 case PPC::BI__builtin_altivec_vclzb:
12270 case PPC::BI__builtin_altivec_vclzh:
12271 case PPC::BI__builtin_altivec_vclzw:
12272 case PPC::BI__builtin_altivec_vclzd: {
12273 llvm::Type *ResultType = ConvertType(E->getType());
12274 Value *X = EmitScalarExpr(E->getArg(0));
12275 Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
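// The false flag is the is_zero_undef operand of llvm.ctlz, so a zero
// input yields the element width rather than an undefined result.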
12276 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ResultType);
12277 return Builder.CreateCall(F, {X, Undef});
12278 }
Nemanja Ivanovic10e2b5d2016-09-27 10:45:22 +000012279 case PPC::BI__builtin_altivec_vctzb:
12280 case PPC::BI__builtin_altivec_vctzh:
12281 case PPC::BI__builtin_altivec_vctzw:
12282 case PPC::BI__builtin_altivec_vctzd: {
12283 llvm::Type *ResultType = ConvertType(E->getType());
12284 Value *X = EmitScalarExpr(E->getArg(0));
12285 Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
12286 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ResultType);
12287 return Builder.CreateCall(F, {X, Undef});
12288 }
12289 case PPC::BI__builtin_altivec_vpopcntb:
12290 case PPC::BI__builtin_altivec_vpopcnth:
12291 case PPC::BI__builtin_altivec_vpopcntw:
12292 case PPC::BI__builtin_altivec_vpopcntd: {
12293 llvm::Type *ResultType = ConvertType(E->getType());
12294 Value *X = EmitScalarExpr(E->getArg(0));
12295 llvm::Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ResultType);
12296 return Builder.CreateCall(F, X);
12297 }
Nemanja Ivanovic6c363ed2015-07-14 17:50:27 +000012298 // Copy sign
12299 case PPC::BI__builtin_vsx_xvcpsgnsp:
12300 case PPC::BI__builtin_vsx_xvcpsgndp: {
12301 llvm::Type *ResultType = ConvertType(E->getType());
12302 Value *X = EmitScalarExpr(E->getArg(0));
12303 Value *Y = EmitScalarExpr(E->getArg(1));
12304 ID = Intrinsic::copysign;
12305 llvm::Function *F = CGM.getIntrinsic(ID, ResultType);
12306 return Builder.CreateCall(F, {X, Y});
12307 }
Nemanja Ivanovic1c7ad712015-07-05 06:40:52 +000012308 // Rounding/truncation
12309 case PPC::BI__builtin_vsx_xvrspip:
12310 case PPC::BI__builtin_vsx_xvrdpip:
12311 case PPC::BI__builtin_vsx_xvrdpim:
12312 case PPC::BI__builtin_vsx_xvrspim:
12313 case PPC::BI__builtin_vsx_xvrdpi:
12314 case PPC::BI__builtin_vsx_xvrspi:
12315 case PPC::BI__builtin_vsx_xvrdpic:
12316 case PPC::BI__builtin_vsx_xvrspic:
12317 case PPC::BI__builtin_vsx_xvrdpiz:
12318 case PPC::BI__builtin_vsx_xvrspiz: {
12319 llvm::Type *ResultType = ConvertType(E->getType());
12320 Value *X = EmitScalarExpr(E->getArg(0));
12321 if (BuiltinID == PPC::BI__builtin_vsx_xvrdpim ||
12322 BuiltinID == PPC::BI__builtin_vsx_xvrspim)
12323 ID = Intrinsic::floor;
12324 else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpi ||
12325 BuiltinID == PPC::BI__builtin_vsx_xvrspi)
12326 ID = Intrinsic::round;
12327 else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpic ||
12328 BuiltinID == PPC::BI__builtin_vsx_xvrspic)
12329 ID = Intrinsic::nearbyint;
12330 else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpip ||
12331 BuiltinID == PPC::BI__builtin_vsx_xvrspip)
12332 ID = Intrinsic::ceil;
12333 else if (BuiltinID == PPC::BI__builtin_vsx_xvrdpiz ||
12334 BuiltinID == PPC::BI__builtin_vsx_xvrspiz)
12335 ID = Intrinsic::trunc;
12336 llvm::Function *F = CGM.getIntrinsic(ID, ResultType);
12337 return Builder.CreateCall(F, X);
12338 }
Kit Bartonfbab1582016-03-09 19:28:31 +000012339
12340 // Absolute value
12341 case PPC::BI__builtin_vsx_xvabsdp:
12342 case PPC::BI__builtin_vsx_xvabssp: {
12343 llvm::Type *ResultType = ConvertType(E->getType());
12344 Value *X = EmitScalarExpr(E->getArg(0));
12345 llvm::Function *F = CGM.getIntrinsic(Intrinsic::fabs, ResultType);
12346 return Builder.CreateCall(F, X);
12347 }
12348
Nemanja Ivanovic1c7ad712015-07-05 06:40:52 +000012349 // FMA variations
12350 case PPC::BI__builtin_vsx_xvmaddadp:
12351 case PPC::BI__builtin_vsx_xvmaddasp:
12352 case PPC::BI__builtin_vsx_xvnmaddadp:
12353 case PPC::BI__builtin_vsx_xvnmaddasp:
12354 case PPC::BI__builtin_vsx_xvmsubadp:
12355 case PPC::BI__builtin_vsx_xvmsubasp:
12356 case PPC::BI__builtin_vsx_xvnmsubadp:
12357 case PPC::BI__builtin_vsx_xvnmsubasp: {
12358 llvm::Type *ResultType = ConvertType(E->getType());
12359 Value *X = EmitScalarExpr(E->getArg(0));
12360 Value *Y = EmitScalarExpr(E->getArg(1));
12361 Value *Z = EmitScalarExpr(E->getArg(2));
12362 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
12363 llvm::Function *F = CGM.getIntrinsic(Intrinsic::fma, ResultType);
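// msub negates the addend, nmadd negates the fma result, and nmsub negates
// both; each negation is emitted as an fsub from zero.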
12364 switch (BuiltinID) {
12365 case PPC::BI__builtin_vsx_xvmaddadp:
12366 case PPC::BI__builtin_vsx_xvmaddasp:
12367 return Builder.CreateCall(F, {X, Y, Z});
12368 case PPC::BI__builtin_vsx_xvnmaddadp:
12369 case PPC::BI__builtin_vsx_xvnmaddasp:
12370 return Builder.CreateFSub(Zero,
12371 Builder.CreateCall(F, {X, Y, Z}), "sub");
12372 case PPC::BI__builtin_vsx_xvmsubadp:
12373 case PPC::BI__builtin_vsx_xvmsubasp:
12374 return Builder.CreateCall(F,
12375 {X, Y, Builder.CreateFSub(Zero, Z, "sub")});
12376 case PPC::BI__builtin_vsx_xvnmsubadp:
12377 case PPC::BI__builtin_vsx_xvnmsubasp:
12378 Value *FsubRes =
12379 Builder.CreateCall(F, {X, Y, Builder.CreateFSub(Zero, Z, "sub")});
12380 return Builder.CreateFSub(Zero, FsubRes, "sub");
12381 }
12382 llvm_unreachable("Unknown FMA operation");
12383 return nullptr; // Suppress no-return warning
12384 }
Sean Fertile96d9e0e2017-01-05 21:43:30 +000012385
12386 case PPC::BI__builtin_vsx_insertword: {
12387 llvm::Function *F = CGM.getIntrinsic(Intrinsic::ppc_vsx_xxinsertw);
12388
 12389 // Third argument is a compile time constant int. It must be clamped to
 12390 // the range [0, 12].
12391 ConstantInt *ArgCI = dyn_cast<ConstantInt>(Ops[2]);
12392 assert(ArgCI &&
12393 "Third arg to xxinsertw intrinsic must be constant integer");
12394 const int64_t MaxIndex = 12;
12395 int64_t Index = clamp(ArgCI->getSExtValue(), 0, MaxIndex);
12396
 12397 // The builtin semantics don't exactly match the xxinsertw instruction's
 12398 // semantics (which ppc_vsx_xxinsertw follows). The builtin extracts the
 12399 // word from the first argument and inserts it into the second argument. The
 12400 // instruction extracts the word from its second input register and inserts
 12401 // it into its first input register, so swap the first and second arguments.
12402 std::swap(Ops[0], Ops[1]);
12403
12404 // Need to cast the second argument from a vector of unsigned int to a
12405 // vector of long long.
12406 Ops[1] = Builder.CreateBitCast(Ops[1], llvm::VectorType::get(Int64Ty, 2));
12407
12408 if (getTarget().isLittleEndian()) {
12409 // Create a shuffle mask of (1, 0)
12410 Constant *ShuffleElts[2] = { ConstantInt::get(Int32Ty, 1),
12411 ConstantInt::get(Int32Ty, 0)
12412 };
12413 Constant *ShuffleMask = llvm::ConstantVector::get(ShuffleElts);
12414
12415 // Reverse the double words in the vector we will extract from.
12416 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 2));
12417 Ops[0] = Builder.CreateShuffleVector(Ops[0], Ops[0], ShuffleMask);
12418
12419 // Reverse the index.
12420 Index = MaxIndex - Index;
12421 }
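    // Worked example (a sketch derived from the code above, not from the ISA
    // documentation): with MaxIndex == 12, an index of 4 on a little-endian
    // target becomes 12 - 4 == 8, compensating for the doubleword swap
    // performed just above.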
12422
12423 // Intrinsic expects the first arg to be a vector of int.
12424 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int32Ty, 4));
12425 Ops[2] = ConstantInt::getSigned(Int32Ty, Index);
12426 return Builder.CreateCall(F, Ops);
12427 }
12428
12429 case PPC::BI__builtin_vsx_extractuword: {
12430 llvm::Function *F = CGM.getIntrinsic(Intrinsic::ppc_vsx_xxextractuw);
12431
12432 // Intrinsic expects the first argument to be a vector of doublewords.
12433 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 2));
12434
 12435 // The second argument is a compile-time constant int that needs to
12436 // be clamped to the range [0, 12].
12437 ConstantInt *ArgCI = dyn_cast<ConstantInt>(Ops[1]);
12438 assert(ArgCI &&
12439 "Second Arg to xxextractuw intrinsic must be a constant integer!");
12440 const int64_t MaxIndex = 12;
12441 int64_t Index = clamp(ArgCI->getSExtValue(), 0, MaxIndex);
12442
12443 if (getTarget().isLittleEndian()) {
12444 // Reverse the index.
12445 Index = MaxIndex - Index;
12446 Ops[1] = ConstantInt::getSigned(Int32Ty, Index);
12447
12448 // Emit the call, then reverse the double words of the results vector.
12449 Value *Call = Builder.CreateCall(F, Ops);
12450
12451 // Create a shuffle mask of (1, 0)
12452 Constant *ShuffleElts[2] = { ConstantInt::get(Int32Ty, 1),
12453 ConstantInt::get(Int32Ty, 0)
12454 };
12455 Constant *ShuffleMask = llvm::ConstantVector::get(ShuffleElts);
12456
12457 Value *ShuffleCall = Builder.CreateShuffleVector(Call, Call, ShuffleMask);
12458 return ShuffleCall;
12459 } else {
12460 Ops[1] = ConstantInt::getSigned(Int32Ty, Index);
12461 return Builder.CreateCall(F, Ops);
12462 }
12463 }
Tony Jiangbbc48e92017-05-24 15:13:32 +000012464
12465 case PPC::BI__builtin_vsx_xxpermdi: {
12466 ConstantInt *ArgCI = dyn_cast<ConstantInt>(Ops[2]);
12467 assert(ArgCI && "Third arg must be constant integer!");
12468
12469 unsigned Index = ArgCI->getZExtValue();
12470 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int64Ty, 2));
12471 Ops[1] = Builder.CreateBitCast(Ops[1], llvm::VectorType::get(Int64Ty, 2));
12472
Nemanja Ivanovic1ac56bd2018-07-19 12:44:15 +000012473 // Account for endianness by treating this as just a shuffle. So we use the
12474 // same indices for both LE and BE in order to produce expected results in
12475 // both cases.
Nemanja Ivanovic2600b832018-07-19 12:49:27 +000012476 unsigned ElemIdx0 = (Index & 2) >> 1;
12477 unsigned ElemIdx1 = 2 + (Index & 1);
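    // Worked example (sketch): Index == 2 gives ElemIdx0 == 1 and
    // ElemIdx1 == 2, i.e. the shuffle selects the second doubleword of
    // Ops[0] and the first doubleword of Ops[1] from the concatenated pair.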
Tony Jiangbbc48e92017-05-24 15:13:32 +000012478
12479 Constant *ShuffleElts[2] = {ConstantInt::get(Int32Ty, ElemIdx0),
12480 ConstantInt::get(Int32Ty, ElemIdx1)};
12481 Constant *ShuffleMask = llvm::ConstantVector::get(ShuffleElts);
12482
12483 Value *ShuffleCall =
12484 Builder.CreateShuffleVector(Ops[0], Ops[1], ShuffleMask);
12485 QualType BIRetType = E->getType();
12486 auto RetTy = ConvertType(BIRetType);
12487 return Builder.CreateBitCast(ShuffleCall, RetTy);
12488 }
Tony Jiang9aa2c032017-05-24 15:54:13 +000012489
12490 case PPC::BI__builtin_vsx_xxsldwi: {
12491 ConstantInt *ArgCI = dyn_cast<ConstantInt>(Ops[2]);
12492 assert(ArgCI && "Third argument must be a compile time constant");
12493 unsigned Index = ArgCI->getZExtValue() & 0x3;
12494 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::VectorType::get(Int32Ty, 4));
12495 Ops[1] = Builder.CreateBitCast(Ops[1], llvm::VectorType::get(Int32Ty, 4));
12496
12497 // Create a shuffle mask
12498 unsigned ElemIdx0;
12499 unsigned ElemIdx1;
12500 unsigned ElemIdx2;
12501 unsigned ElemIdx3;
12502 if (getTarget().isLittleEndian()) {
12503 // Little endian element N comes from element 8+N-Index of the
12504 // concatenated wide vector (of course, using modulo arithmetic on
12505 // the total number of elements).
12506 ElemIdx0 = (8 - Index) % 8;
12507 ElemIdx1 = (9 - Index) % 8;
12508 ElemIdx2 = (10 - Index) % 8;
12509 ElemIdx3 = (11 - Index) % 8;
12510 } else {
12511 // Big endian ElemIdx<N> = Index + N
12512 ElemIdx0 = Index;
12513 ElemIdx1 = Index + 1;
12514 ElemIdx2 = Index + 2;
12515 ElemIdx3 = Index + 3;
12516 }
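    // Worked example (sketch): for Index == 1 the mask below selects
    // elements {7, 0, 1, 2} of the concatenated pair on little-endian
    // targets and {1, 2, 3, 4} on big-endian targets.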
12517
12518 Constant *ShuffleElts[4] = {ConstantInt::get(Int32Ty, ElemIdx0),
12519 ConstantInt::get(Int32Ty, ElemIdx1),
12520 ConstantInt::get(Int32Ty, ElemIdx2),
12521 ConstantInt::get(Int32Ty, ElemIdx3)};
12522
12523 Constant *ShuffleMask = llvm::ConstantVector::get(ShuffleElts);
12524 Value *ShuffleCall =
12525 Builder.CreateShuffleVector(Ops[0], Ops[1], ShuffleMask);
12526 QualType BIRetType = E->getType();
12527 auto RetTy = ConvertType(BIRetType);
12528 return Builder.CreateBitCast(ShuffleCall, RetTy);
12529 }
QingShan Zhangaccb65b2018-09-20 05:04:57 +000012530
12531 case PPC::BI__builtin_pack_vector_int128: {
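    // Sketch of the lowering below: the two scalar halves are inserted into
    // a two-element vector (element 1 then element 0 on little-endian
    // targets, element 0 then element 1 on big-endian targets), and the
    // result is bitcast to the builtin's 128-bit return type.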
12532 bool isLittleEndian = getTarget().isLittleEndian();
12533 Value *UndefValue =
12534 llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), 2));
12535 Value *Res = Builder.CreateInsertElement(
12536 UndefValue, Ops[0], (uint64_t)(isLittleEndian ? 1 : 0));
12537 Res = Builder.CreateInsertElement(Res, Ops[1],
12538 (uint64_t)(isLittleEndian ? 0 : 1));
12539 return Builder.CreateBitCast(Res, ConvertType(E->getType()));
12540 }
12541
12542 case PPC::BI__builtin_unpack_vector_int128: {
12543 ConstantInt *Index = cast<ConstantInt>(Ops[1]);
12544 Value *Unpacked = Builder.CreateBitCast(
12545 Ops[0], llvm::VectorType::get(ConvertType(E->getType()), 2));
12546
12547 if (getTarget().isLittleEndian())
12548 Index = ConstantInt::get(Index->getType(), 1 - Index->getZExtValue());
12549
12550 return Builder.CreateExtractElement(Unpacked, Index);
12551 }
Nemanja Ivanovic1c7ad712015-07-05 06:40:52 +000012552 }
Mike Stump11289f42009-09-09 15:08:12 +000012553}
Matt Arsenault56f008d2014-06-24 20:45:01 +000012554
Matt Arsenault3ea39f92015-06-19 17:54:10 +000012555Value *CodeGenFunction::EmitAMDGPUBuiltinExpr(unsigned BuiltinID,
12556 const CallExpr *E) {
Matt Arsenault56f008d2014-06-24 20:45:01 +000012557 switch (BuiltinID) {
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012558 case AMDGPU::BI__builtin_amdgcn_div_scale:
12559 case AMDGPU::BI__builtin_amdgcn_div_scalef: {
Matt Arsenault56f008d2014-06-24 20:45:01 +000012560 // Translate from the intrinsic's struct return to the builtin's out
12561 // argument.
12562
John McCall7f416cc2015-09-08 08:05:57 +000012563 Address FlagOutPtr = EmitPointerWithAlignment(E->getArg(3));
Matt Arsenault56f008d2014-06-24 20:45:01 +000012564
12565 llvm::Value *X = EmitScalarExpr(E->getArg(0));
12566 llvm::Value *Y = EmitScalarExpr(E->getArg(1));
12567 llvm::Value *Z = EmitScalarExpr(E->getArg(2));
12568
James Y Knight8799cae2019-02-03 21:53:49 +000012569 llvm::Function *Callee = CGM.getIntrinsic(Intrinsic::amdgcn_div_scale,
Matt Arsenault56f008d2014-06-24 20:45:01 +000012570 X->getType());
12571
David Blaikie43f9bb72015-05-18 22:14:03 +000012572 llvm::Value *Tmp = Builder.CreateCall(Callee, {X, Y, Z});
Matt Arsenault56f008d2014-06-24 20:45:01 +000012573
12574 llvm::Value *Result = Builder.CreateExtractValue(Tmp, 0);
12575 llvm::Value *Flag = Builder.CreateExtractValue(Tmp, 1);
12576
12577 llvm::Type *RealFlagType
John McCall7f416cc2015-09-08 08:05:57 +000012578 = FlagOutPtr.getPointer()->getType()->getPointerElementType();
Matt Arsenault56f008d2014-06-24 20:45:01 +000012579
12580 llvm::Value *FlagExt = Builder.CreateZExt(Flag, RealFlagType);
John McCall7f416cc2015-09-08 08:05:57 +000012581 Builder.CreateStore(FlagExt, FlagOutPtr);
Matt Arsenault56f008d2014-06-24 20:45:01 +000012582 return Result;
Matt Arsenault85877112014-07-15 17:23:46 +000012583 }
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012584 case AMDGPU::BI__builtin_amdgcn_div_fmas:
12585 case AMDGPU::BI__builtin_amdgcn_div_fmasf: {
Matt Arsenault2174a9d2014-10-21 22:21:41 +000012586 llvm::Value *Src0 = EmitScalarExpr(E->getArg(0));
12587 llvm::Value *Src1 = EmitScalarExpr(E->getArg(1));
12588 llvm::Value *Src2 = EmitScalarExpr(E->getArg(2));
12589 llvm::Value *Src3 = EmitScalarExpr(E->getArg(3));
12590
James Y Knight8799cae2019-02-03 21:53:49 +000012591 llvm::Function *F = CGM.getIntrinsic(Intrinsic::amdgcn_div_fmas,
Matt Arsenault2174a9d2014-10-21 22:21:41 +000012592 Src0->getType());
12593 llvm::Value *Src3ToBool = Builder.CreateIsNotNull(Src3);
David Blaikie43f9bb72015-05-18 22:14:03 +000012594 return Builder.CreateCall(F, {Src0, Src1, Src2, Src3ToBool});
Matt Arsenault2174a9d2014-10-21 22:21:41 +000012595 }
Changpeng Fang03bdd8f2016-08-18 22:04:54 +000012596
12597 case AMDGPU::BI__builtin_amdgcn_ds_swizzle:
12598 return emitBinaryBuiltin(*this, E, Intrinsic::amdgcn_ds_swizzle);
Yaxun Liuaae1e872018-10-17 02:32:26 +000012599 case AMDGPU::BI__builtin_amdgcn_mov_dpp:
12600 case AMDGPU::BI__builtin_amdgcn_update_dpp: {
12601 llvm::SmallVector<llvm::Value *, 6> Args;
12602 for (unsigned I = 0; I != E->getNumArgs(); ++I)
Yaxun Liu4d867992017-03-10 01:30:46 +000012603 Args.push_back(EmitScalarExpr(E->getArg(I)));
Yaxun Liuaae1e872018-10-17 02:32:26 +000012604 assert(Args.size() == 5 || Args.size() == 6);
12605 if (Args.size() == 5)
12606 Args.insert(Args.begin(), llvm::UndefValue::get(Args[0]->getType()));
James Y Knight8799cae2019-02-03 21:53:49 +000012607 Function *F =
Yaxun Liuaae1e872018-10-17 02:32:26 +000012608 CGM.getIntrinsic(Intrinsic::amdgcn_update_dpp, Args[0]->getType());
Yaxun Liu4d867992017-03-10 01:30:46 +000012609 return Builder.CreateCall(F, Args);
12610 }
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012611 case AMDGPU::BI__builtin_amdgcn_div_fixup:
12612 case AMDGPU::BI__builtin_amdgcn_div_fixupf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012613 case AMDGPU::BI__builtin_amdgcn_div_fixuph:
Matt Arsenaultf652cae2016-07-01 17:38:14 +000012614 return emitTernaryBuiltin(*this, E, Intrinsic::amdgcn_div_fixup);
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012615 case AMDGPU::BI__builtin_amdgcn_trig_preop:
12616 case AMDGPU::BI__builtin_amdgcn_trig_preopf:
12617 return emitFPIntBuiltin(*this, E, Intrinsic::amdgcn_trig_preop);
12618 case AMDGPU::BI__builtin_amdgcn_rcp:
12619 case AMDGPU::BI__builtin_amdgcn_rcpf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012620 case AMDGPU::BI__builtin_amdgcn_rcph:
Matt Arsenault105e8922016-02-03 17:49:38 +000012621 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_rcp);
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012622 case AMDGPU::BI__builtin_amdgcn_rsq:
12623 case AMDGPU::BI__builtin_amdgcn_rsqf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012624 case AMDGPU::BI__builtin_amdgcn_rsqh:
Matt Arsenault105e8922016-02-03 17:49:38 +000012625 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_rsq);
Matt Arsenaultf5c1f472016-02-13 01:03:09 +000012626 case AMDGPU::BI__builtin_amdgcn_rsq_clamp:
12627 case AMDGPU::BI__builtin_amdgcn_rsq_clampf:
12628 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_rsq_clamp);
Matt Arsenault9b277b42016-02-13 01:21:09 +000012629 case AMDGPU::BI__builtin_amdgcn_sinf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012630 case AMDGPU::BI__builtin_amdgcn_sinh:
Matt Arsenault9b277b42016-02-13 01:21:09 +000012631 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_sin);
12632 case AMDGPU::BI__builtin_amdgcn_cosf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012633 case AMDGPU::BI__builtin_amdgcn_cosh:
Matt Arsenault9b277b42016-02-13 01:21:09 +000012634 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_cos);
12635 case AMDGPU::BI__builtin_amdgcn_log_clampf:
12636 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_log_clamp);
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012637 case AMDGPU::BI__builtin_amdgcn_ldexp:
12638 case AMDGPU::BI__builtin_amdgcn_ldexpf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012639 case AMDGPU::BI__builtin_amdgcn_ldexph:
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012640 return emitFPIntBuiltin(*this, E, Intrinsic::amdgcn_ldexp);
Matt Arsenault3fb96332016-03-30 22:57:40 +000012641 case AMDGPU::BI__builtin_amdgcn_frexp_mant:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012642 case AMDGPU::BI__builtin_amdgcn_frexp_mantf:
12643 case AMDGPU::BI__builtin_amdgcn_frexp_manth:
Matt Arsenault3fb96332016-03-30 22:57:40 +000012644 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_frexp_mant);
Matt Arsenault3fb96332016-03-30 22:57:40 +000012645 case AMDGPU::BI__builtin_amdgcn_frexp_exp:
Konstantin Zhuravlyov62ae8f62016-11-18 22:31:51 +000012646 case AMDGPU::BI__builtin_amdgcn_frexp_expf: {
12647 Value *Src0 = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000012648 Function *F = CGM.getIntrinsic(Intrinsic::amdgcn_frexp_exp,
Konstantin Zhuravlyov62ae8f62016-11-18 22:31:51 +000012649 { Builder.getInt32Ty(), Src0->getType() });
12650 return Builder.CreateCall(F, Src0);
12651 }
12652 case AMDGPU::BI__builtin_amdgcn_frexp_exph: {
12653 Value *Src0 = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000012654 Function *F = CGM.getIntrinsic(Intrinsic::amdgcn_frexp_exp,
Konstantin Zhuravlyov62ae8f62016-11-18 22:31:51 +000012655 { Builder.getInt16Ty(), Src0->getType() });
12656 return Builder.CreateCall(F, Src0);
12657 }
Matt Arsenault2d510592016-05-28 00:43:27 +000012658 case AMDGPU::BI__builtin_amdgcn_fract:
12659 case AMDGPU::BI__builtin_amdgcn_fractf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012660 case AMDGPU::BI__builtin_amdgcn_fracth:
Matt Arsenault2d510592016-05-28 00:43:27 +000012661 return emitUnaryBuiltin(*this, E, Intrinsic::amdgcn_fract);
Wei Dingea41f352016-07-15 16:43:03 +000012662 case AMDGPU::BI__builtin_amdgcn_lerp:
12663 return emitTernaryBuiltin(*this, E, Intrinsic::amdgcn_lerp);
Wei Ding91c84502016-08-05 15:38:46 +000012664 case AMDGPU::BI__builtin_amdgcn_uicmp:
12665 case AMDGPU::BI__builtin_amdgcn_uicmpl:
12666 case AMDGPU::BI__builtin_amdgcn_sicmp:
12667 case AMDGPU::BI__builtin_amdgcn_sicmpl:
12668 return emitTernaryBuiltin(*this, E, Intrinsic::amdgcn_icmp);
12669 case AMDGPU::BI__builtin_amdgcn_fcmp:
12670 case AMDGPU::BI__builtin_amdgcn_fcmpf:
12671 return emitTernaryBuiltin(*this, E, Intrinsic::amdgcn_fcmp);
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012672 case AMDGPU::BI__builtin_amdgcn_class:
12673 case AMDGPU::BI__builtin_amdgcn_classf:
Konstantin Zhuravlyov81a78bb2016-11-13 02:37:05 +000012674 case AMDGPU::BI__builtin_amdgcn_classh:
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012675 return emitFPIntBuiltin(*this, E, Intrinsic::amdgcn_class);
Matt Arsenaulta274b202017-01-31 03:42:07 +000012676 case AMDGPU::BI__builtin_amdgcn_fmed3f:
Matt Arsenaulta0c6dca2017-02-22 20:55:59 +000012677 case AMDGPU::BI__builtin_amdgcn_fmed3h:
Matt Arsenaulta274b202017-01-31 03:42:07 +000012678 return emitTernaryBuiltin(*this, E, Intrinsic::amdgcn_fmed3);
Matt Arsenaultb7288862019-01-28 23:59:18 +000012679 case AMDGPU::BI__builtin_amdgcn_ds_append:
12680 case AMDGPU::BI__builtin_amdgcn_ds_consume: {
12681 Intrinsic::ID Intrin = BuiltinID == AMDGPU::BI__builtin_amdgcn_ds_append ?
12682 Intrinsic::amdgcn_ds_append : Intrinsic::amdgcn_ds_consume;
12683 Value *Src0 = EmitScalarExpr(E->getArg(0));
12684 Function *F = CGM.getIntrinsic(Intrin, { Src0->getType() });
12685 return Builder.CreateCall(F, { Src0, Builder.getFalse() });
12686 }
Matt Arsenault64665bc2016-06-28 00:13:17 +000012687 case AMDGPU::BI__builtin_amdgcn_read_exec: {
12688 CallInst *CI = cast<CallInst>(
12689 EmitSpecialRegisterBuiltin(*this, E, Int64Ty, Int64Ty, true, "exec"));
12690 CI->setConvergent();
12691 return CI;
12692 }
Matt Arsenaultf12e3b82017-10-09 20:06:37 +000012693 case AMDGPU::BI__builtin_amdgcn_read_exec_lo:
12694 case AMDGPU::BI__builtin_amdgcn_read_exec_hi: {
12695 StringRef RegName = BuiltinID == AMDGPU::BI__builtin_amdgcn_read_exec_lo ?
12696 "exec_lo" : "exec_hi";
12697 CallInst *CI = cast<CallInst>(
12698 EmitSpecialRegisterBuiltin(*this, E, Int32Ty, Int32Ty, true, RegName));
12699 CI->setConvergent();
12700 return CI;
12701 }
Jan Veselyd7e03a52016-07-10 22:38:04 +000012702 // amdgcn workitem
12703 case AMDGPU::BI__builtin_amdgcn_workitem_id_x:
12704 return emitRangedBuiltin(*this, Intrinsic::amdgcn_workitem_id_x, 0, 1024);
12705 case AMDGPU::BI__builtin_amdgcn_workitem_id_y:
12706 return emitRangedBuiltin(*this, Intrinsic::amdgcn_workitem_id_y, 0, 1024);
12707 case AMDGPU::BI__builtin_amdgcn_workitem_id_z:
12708 return emitRangedBuiltin(*this, Intrinsic::amdgcn_workitem_id_z, 0, 1024);
12709
Matt Arsenaultc86671d2016-07-15 21:33:02 +000012710 // r600 intrinsics
12711 case AMDGPU::BI__builtin_r600_recipsqrt_ieee:
12712 case AMDGPU::BI__builtin_r600_recipsqrt_ieeef:
12713 return emitUnaryBuiltin(*this, E, Intrinsic::r600_recipsqrt_ieee);
Jan Veselyd7e03a52016-07-10 22:38:04 +000012714 case AMDGPU::BI__builtin_r600_read_tidig_x:
12715 return emitRangedBuiltin(*this, Intrinsic::r600_read_tidig_x, 0, 1024);
12716 case AMDGPU::BI__builtin_r600_read_tidig_y:
12717 return emitRangedBuiltin(*this, Intrinsic::r600_read_tidig_y, 0, 1024);
12718 case AMDGPU::BI__builtin_r600_read_tidig_z:
12719 return emitRangedBuiltin(*this, Intrinsic::r600_read_tidig_z, 0, 1024);
Matt Arsenault8a4078c2016-01-22 21:30:53 +000012720 default:
Matt Arsenault56f008d2014-06-24 20:45:01 +000012721 return nullptr;
12722 }
12723}
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012724
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012725/// Handle a SystemZ function in which the final argument is a pointer
12726/// to an int that receives the post-instruction CC value. At the LLVM level
12727/// this is represented as a function that returns a {result, cc} pair.
12728static Value *EmitSystemZIntrinsicWithCC(CodeGenFunction &CGF,
12729 unsigned IntrinsicID,
12730 const CallExpr *E) {
12731 unsigned NumArgs = E->getNumArgs() - 1;
12732 SmallVector<Value *, 8> Args(NumArgs);
12733 for (unsigned I = 0; I < NumArgs; ++I)
12734 Args[I] = CGF.EmitScalarExpr(E->getArg(I));
John McCall7f416cc2015-09-08 08:05:57 +000012735 Address CCPtr = CGF.EmitPointerWithAlignment(E->getArg(NumArgs));
James Y Knight8799cae2019-02-03 21:53:49 +000012736 Function *F = CGF.CGM.getIntrinsic(IntrinsicID);
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012737 Value *Call = CGF.Builder.CreateCall(F, Args);
12738 Value *CC = CGF.Builder.CreateExtractValue(Call, 1);
12739 CGF.Builder.CreateStore(CC, CCPtr);
12740 return CGF.Builder.CreateExtractValue(Call, 0);
12741}
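// Illustrative sketch (not a literal IR dump): for a builtin such as
// __builtin_s390_vpkshs(a, b, &cc), the helper above emits a call to the
// corresponding llvm.s390.* intrinsic, stores the second member of the
// returned {result, cc} pair through the user's int pointer, and returns
// the first member as the builtin's value.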
12742
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012743Value *CodeGenFunction::EmitSystemZBuiltinExpr(unsigned BuiltinID,
12744 const CallExpr *E) {
12745 switch (BuiltinID) {
12746 case SystemZ::BI__builtin_tbegin: {
12747 Value *TDB = EmitScalarExpr(E->getArg(0));
12748 Value *Control = llvm::ConstantInt::get(Int32Ty, 0xff0c);
James Y Knight8799cae2019-02-03 21:53:49 +000012749 Function *F = CGM.getIntrinsic(Intrinsic::s390_tbegin);
David Blaikie43f9bb72015-05-18 22:14:03 +000012750 return Builder.CreateCall(F, {TDB, Control});
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012751 }
12752 case SystemZ::BI__builtin_tbegin_nofloat: {
12753 Value *TDB = EmitScalarExpr(E->getArg(0));
12754 Value *Control = llvm::ConstantInt::get(Int32Ty, 0xff0c);
James Y Knight8799cae2019-02-03 21:53:49 +000012755 Function *F = CGM.getIntrinsic(Intrinsic::s390_tbegin_nofloat);
David Blaikie43f9bb72015-05-18 22:14:03 +000012756 return Builder.CreateCall(F, {TDB, Control});
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012757 }
12758 case SystemZ::BI__builtin_tbeginc: {
12759 Value *TDB = llvm::ConstantPointerNull::get(Int8PtrTy);
12760 Value *Control = llvm::ConstantInt::get(Int32Ty, 0xff08);
James Y Knight8799cae2019-02-03 21:53:49 +000012761 Function *F = CGM.getIntrinsic(Intrinsic::s390_tbeginc);
David Blaikie43f9bb72015-05-18 22:14:03 +000012762 return Builder.CreateCall(F, {TDB, Control});
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012763 }
12764 case SystemZ::BI__builtin_tabort: {
12765 Value *Data = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000012766 Function *F = CGM.getIntrinsic(Intrinsic::s390_tabort);
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012767 return Builder.CreateCall(F, Builder.CreateSExt(Data, Int64Ty, "tabort"));
12768 }
12769 case SystemZ::BI__builtin_non_tx_store: {
12770 Value *Address = EmitScalarExpr(E->getArg(0));
12771 Value *Data = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000012772 Function *F = CGM.getIntrinsic(Intrinsic::s390_ntstg);
David Blaikie43f9bb72015-05-18 22:14:03 +000012773 return Builder.CreateCall(F, {Data, Address});
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000012774 }
12775
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012776 // Vector builtins. Note that most vector builtins are mapped automatically
12777 // to target-specific LLVM intrinsics. The ones handled specially here can
 12778 // be represented via standard LLVM IR, which is preferable because it
 12779 // enables common LLVM optimizations.
12780
12781 case SystemZ::BI__builtin_s390_vpopctb:
12782 case SystemZ::BI__builtin_s390_vpopcth:
12783 case SystemZ::BI__builtin_s390_vpopctf:
12784 case SystemZ::BI__builtin_s390_vpopctg: {
12785 llvm::Type *ResultType = ConvertType(E->getType());
12786 Value *X = EmitScalarExpr(E->getArg(0));
12787 Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ResultType);
12788 return Builder.CreateCall(F, X);
12789 }
12790
12791 case SystemZ::BI__builtin_s390_vclzb:
12792 case SystemZ::BI__builtin_s390_vclzh:
12793 case SystemZ::BI__builtin_s390_vclzf:
12794 case SystemZ::BI__builtin_s390_vclzg: {
12795 llvm::Type *ResultType = ConvertType(E->getType());
12796 Value *X = EmitScalarExpr(E->getArg(0));
12797 Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
12798 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ResultType);
David Blaikie43f9bb72015-05-18 22:14:03 +000012799 return Builder.CreateCall(F, {X, Undef});
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012800 }
12801
12802 case SystemZ::BI__builtin_s390_vctzb:
12803 case SystemZ::BI__builtin_s390_vctzh:
12804 case SystemZ::BI__builtin_s390_vctzf:
12805 case SystemZ::BI__builtin_s390_vctzg: {
12806 llvm::Type *ResultType = ConvertType(E->getType());
12807 Value *X = EmitScalarExpr(E->getArg(0));
12808 Value *Undef = ConstantInt::get(Builder.getInt1Ty(), false);
12809 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ResultType);
David Blaikie43f9bb72015-05-18 22:14:03 +000012810 return Builder.CreateCall(F, {X, Undef});
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012811 }
12812
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012813 case SystemZ::BI__builtin_s390_vfsqsb:
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012814 case SystemZ::BI__builtin_s390_vfsqdb: {
12815 llvm::Type *ResultType = ConvertType(E->getType());
12816 Value *X = EmitScalarExpr(E->getArg(0));
12817 Function *F = CGM.getIntrinsic(Intrinsic::sqrt, ResultType);
12818 return Builder.CreateCall(F, X);
12819 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012820 case SystemZ::BI__builtin_s390_vfmasb:
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012821 case SystemZ::BI__builtin_s390_vfmadb: {
12822 llvm::Type *ResultType = ConvertType(E->getType());
12823 Value *X = EmitScalarExpr(E->getArg(0));
12824 Value *Y = EmitScalarExpr(E->getArg(1));
12825 Value *Z = EmitScalarExpr(E->getArg(2));
12826 Function *F = CGM.getIntrinsic(Intrinsic::fma, ResultType);
David Blaikie43f9bb72015-05-18 22:14:03 +000012827 return Builder.CreateCall(F, {X, Y, Z});
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012828 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012829 case SystemZ::BI__builtin_s390_vfmssb:
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012830 case SystemZ::BI__builtin_s390_vfmsdb: {
12831 llvm::Type *ResultType = ConvertType(E->getType());
12832 Value *X = EmitScalarExpr(E->getArg(0));
12833 Value *Y = EmitScalarExpr(E->getArg(1));
12834 Value *Z = EmitScalarExpr(E->getArg(2));
12835 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
12836 Function *F = CGM.getIntrinsic(Intrinsic::fma, ResultType);
David Blaikie43f9bb72015-05-18 22:14:03 +000012837 return Builder.CreateCall(F, {X, Y, Builder.CreateFSub(Zero, Z, "sub")});
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012838 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012839 case SystemZ::BI__builtin_s390_vfnmasb:
12840 case SystemZ::BI__builtin_s390_vfnmadb: {
12841 llvm::Type *ResultType = ConvertType(E->getType());
12842 Value *X = EmitScalarExpr(E->getArg(0));
12843 Value *Y = EmitScalarExpr(E->getArg(1));
12844 Value *Z = EmitScalarExpr(E->getArg(2));
12845 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
12846 Function *F = CGM.getIntrinsic(Intrinsic::fma, ResultType);
12847 return Builder.CreateFSub(Zero, Builder.CreateCall(F, {X, Y, Z}), "sub");
12848 }
12849 case SystemZ::BI__builtin_s390_vfnmssb:
12850 case SystemZ::BI__builtin_s390_vfnmsdb: {
12851 llvm::Type *ResultType = ConvertType(E->getType());
12852 Value *X = EmitScalarExpr(E->getArg(0));
12853 Value *Y = EmitScalarExpr(E->getArg(1));
12854 Value *Z = EmitScalarExpr(E->getArg(2));
12855 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
12856 Function *F = CGM.getIntrinsic(Intrinsic::fma, ResultType);
12857 Value *NegZ = Builder.CreateFSub(Zero, Z, "sub");
12858 return Builder.CreateFSub(Zero, Builder.CreateCall(F, {X, Y, NegZ}));
12859 }
12860 case SystemZ::BI__builtin_s390_vflpsb:
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012861 case SystemZ::BI__builtin_s390_vflpdb: {
12862 llvm::Type *ResultType = ConvertType(E->getType());
12863 Value *X = EmitScalarExpr(E->getArg(0));
12864 Function *F = CGM.getIntrinsic(Intrinsic::fabs, ResultType);
12865 return Builder.CreateCall(F, X);
12866 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012867 case SystemZ::BI__builtin_s390_vflnsb:
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012868 case SystemZ::BI__builtin_s390_vflndb: {
12869 llvm::Type *ResultType = ConvertType(E->getType());
12870 Value *X = EmitScalarExpr(E->getArg(0));
12871 Value *Zero = llvm::ConstantFP::getZeroValueForNegation(ResultType);
12872 Function *F = CGM.getIntrinsic(Intrinsic::fabs, ResultType);
12873 return Builder.CreateFSub(Zero, Builder.CreateCall(F, X), "sub");
12874 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012875 case SystemZ::BI__builtin_s390_vfisb:
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012876 case SystemZ::BI__builtin_s390_vfidb: {
12877 llvm::Type *ResultType = ConvertType(E->getType());
12878 Value *X = EmitScalarExpr(E->getArg(0));
12879 // Constant-fold the M4 and M5 mask arguments.
12880 llvm::APSInt M4, M5;
12881 bool IsConstM4 = E->getArg(1)->isIntegerConstantExpr(M4, getContext());
12882 bool IsConstM5 = E->getArg(2)->isIntegerConstantExpr(M5, getContext());
12883 assert(IsConstM4 && IsConstM5 && "Constant arg isn't actually constant?");
12884 (void)IsConstM4; (void)IsConstM5;
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012885 // Check whether this instance can be represented via an LLVM standard
12886 // intrinsic. We only support some combinations of M4 and M5.
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012887 Intrinsic::ID ID = Intrinsic::not_intrinsic;
12888 switch (M4.getZExtValue()) {
12889 default: break;
12890 case 0: // IEEE-inexact exception allowed
12891 switch (M5.getZExtValue()) {
12892 default: break;
12893 case 0: ID = Intrinsic::rint; break;
12894 }
12895 break;
12896 case 4: // IEEE-inexact exception suppressed
12897 switch (M5.getZExtValue()) {
12898 default: break;
12899 case 0: ID = Intrinsic::nearbyint; break;
12900 case 1: ID = Intrinsic::round; break;
12901 case 5: ID = Intrinsic::trunc; break;
12902 case 6: ID = Intrinsic::ceil; break;
12903 case 7: ID = Intrinsic::floor; break;
12904 }
12905 break;
12906 }
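    // For example (sketch): M4 == 4 with M5 == 6 lowers to llvm.ceil, while
    // any (M4, M5) combination not listed above falls through to the
    // target-specific s390.vfisb/s390.vfidb intrinsic below.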
12907 if (ID != Intrinsic::not_intrinsic) {
12908 Function *F = CGM.getIntrinsic(ID, ResultType);
12909 return Builder.CreateCall(F, X);
12910 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012911 switch (BuiltinID) {
12912 case SystemZ::BI__builtin_s390_vfisb: ID = Intrinsic::s390_vfisb; break;
12913 case SystemZ::BI__builtin_s390_vfidb: ID = Intrinsic::s390_vfidb; break;
12914 default: llvm_unreachable("Unknown BuiltinID");
12915 }
12916 Function *F = CGM.getIntrinsic(ID);
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012917 Value *M4Value = llvm::ConstantInt::get(getLLVMContext(), M4);
12918 Value *M5Value = llvm::ConstantInt::get(getLLVMContext(), M5);
David Blaikie43f9bb72015-05-18 22:14:03 +000012919 return Builder.CreateCall(F, {X, M4Value, M5Value});
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012920 }
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000012921 case SystemZ::BI__builtin_s390_vfmaxsb:
12922 case SystemZ::BI__builtin_s390_vfmaxdb: {
12923 llvm::Type *ResultType = ConvertType(E->getType());
12924 Value *X = EmitScalarExpr(E->getArg(0));
12925 Value *Y = EmitScalarExpr(E->getArg(1));
12926 // Constant-fold the M4 mask argument.
12927 llvm::APSInt M4;
12928 bool IsConstM4 = E->getArg(2)->isIntegerConstantExpr(M4, getContext());
12929 assert(IsConstM4 && "Constant arg isn't actually constant?");
12930 (void)IsConstM4;
 12931 // Check whether this instance can be represented via an LLVM standard
12932 // intrinsic. We only support some values of M4.
12933 Intrinsic::ID ID = Intrinsic::not_intrinsic;
12934 switch (M4.getZExtValue()) {
12935 default: break;
12936 case 4: ID = Intrinsic::maxnum; break;
12937 }
12938 if (ID != Intrinsic::not_intrinsic) {
12939 Function *F = CGM.getIntrinsic(ID, ResultType);
12940 return Builder.CreateCall(F, {X, Y});
12941 }
12942 switch (BuiltinID) {
12943 case SystemZ::BI__builtin_s390_vfmaxsb: ID = Intrinsic::s390_vfmaxsb; break;
12944 case SystemZ::BI__builtin_s390_vfmaxdb: ID = Intrinsic::s390_vfmaxdb; break;
12945 default: llvm_unreachable("Unknown BuiltinID");
12946 }
12947 Function *F = CGM.getIntrinsic(ID);
12948 Value *M4Value = llvm::ConstantInt::get(getLLVMContext(), M4);
12949 return Builder.CreateCall(F, {X, Y, M4Value});
12950 }
12951 case SystemZ::BI__builtin_s390_vfminsb:
12952 case SystemZ::BI__builtin_s390_vfmindb: {
12953 llvm::Type *ResultType = ConvertType(E->getType());
12954 Value *X = EmitScalarExpr(E->getArg(0));
12955 Value *Y = EmitScalarExpr(E->getArg(1));
12956 // Constant-fold the M4 mask argument.
12957 llvm::APSInt M4;
12958 bool IsConstM4 = E->getArg(2)->isIntegerConstantExpr(M4, getContext());
12959 assert(IsConstM4 && "Constant arg isn't actually constant?");
12960 (void)IsConstM4;
 12961 // Check whether this instance can be represented via an LLVM standard
12962 // intrinsic. We only support some values of M4.
12963 Intrinsic::ID ID = Intrinsic::not_intrinsic;
12964 switch (M4.getZExtValue()) {
12965 default: break;
12966 case 4: ID = Intrinsic::minnum; break;
12967 }
12968 if (ID != Intrinsic::not_intrinsic) {
12969 Function *F = CGM.getIntrinsic(ID, ResultType);
12970 return Builder.CreateCall(F, {X, Y});
12971 }
12972 switch (BuiltinID) {
12973 case SystemZ::BI__builtin_s390_vfminsb: ID = Intrinsic::s390_vfminsb; break;
12974 case SystemZ::BI__builtin_s390_vfmindb: ID = Intrinsic::s390_vfmindb; break;
12975 default: llvm_unreachable("Unknown BuiltinID");
12976 }
12977 Function *F = CGM.getIntrinsic(ID);
12978 Value *M4Value = llvm::ConstantInt::get(getLLVMContext(), M4);
12979 return Builder.CreateCall(F, {X, Y, M4Value});
12980 }
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012981
Raphael Isemannb23ccec2018-12-10 12:37:46 +000012982 // Vector intrinsics that output the post-instruction CC value.
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000012983
12984#define INTRINSIC_WITH_CC(NAME) \
12985 case SystemZ::BI__builtin_##NAME: \
12986 return EmitSystemZIntrinsicWithCC(*this, Intrinsic::NAME, E)
12987
12988 INTRINSIC_WITH_CC(s390_vpkshs);
12989 INTRINSIC_WITH_CC(s390_vpksfs);
12990 INTRINSIC_WITH_CC(s390_vpksgs);
12991
12992 INTRINSIC_WITH_CC(s390_vpklshs);
12993 INTRINSIC_WITH_CC(s390_vpklsfs);
12994 INTRINSIC_WITH_CC(s390_vpklsgs);
12995
12996 INTRINSIC_WITH_CC(s390_vceqbs);
12997 INTRINSIC_WITH_CC(s390_vceqhs);
12998 INTRINSIC_WITH_CC(s390_vceqfs);
12999 INTRINSIC_WITH_CC(s390_vceqgs);
13000
13001 INTRINSIC_WITH_CC(s390_vchbs);
13002 INTRINSIC_WITH_CC(s390_vchhs);
13003 INTRINSIC_WITH_CC(s390_vchfs);
13004 INTRINSIC_WITH_CC(s390_vchgs);
13005
13006 INTRINSIC_WITH_CC(s390_vchlbs);
13007 INTRINSIC_WITH_CC(s390_vchlhs);
13008 INTRINSIC_WITH_CC(s390_vchlfs);
13009 INTRINSIC_WITH_CC(s390_vchlgs);
13010
13011 INTRINSIC_WITH_CC(s390_vfaebs);
13012 INTRINSIC_WITH_CC(s390_vfaehs);
13013 INTRINSIC_WITH_CC(s390_vfaefs);
13014
13015 INTRINSIC_WITH_CC(s390_vfaezbs);
13016 INTRINSIC_WITH_CC(s390_vfaezhs);
13017 INTRINSIC_WITH_CC(s390_vfaezfs);
13018
13019 INTRINSIC_WITH_CC(s390_vfeebs);
13020 INTRINSIC_WITH_CC(s390_vfeehs);
13021 INTRINSIC_WITH_CC(s390_vfeefs);
13022
13023 INTRINSIC_WITH_CC(s390_vfeezbs);
13024 INTRINSIC_WITH_CC(s390_vfeezhs);
13025 INTRINSIC_WITH_CC(s390_vfeezfs);
13026
13027 INTRINSIC_WITH_CC(s390_vfenebs);
13028 INTRINSIC_WITH_CC(s390_vfenehs);
13029 INTRINSIC_WITH_CC(s390_vfenefs);
13030
13031 INTRINSIC_WITH_CC(s390_vfenezbs);
13032 INTRINSIC_WITH_CC(s390_vfenezhs);
13033 INTRINSIC_WITH_CC(s390_vfenezfs);
13034
13035 INTRINSIC_WITH_CC(s390_vistrbs);
13036 INTRINSIC_WITH_CC(s390_vistrhs);
13037 INTRINSIC_WITH_CC(s390_vistrfs);
13038
13039 INTRINSIC_WITH_CC(s390_vstrcbs);
13040 INTRINSIC_WITH_CC(s390_vstrchs);
13041 INTRINSIC_WITH_CC(s390_vstrcfs);
13042
13043 INTRINSIC_WITH_CC(s390_vstrczbs);
13044 INTRINSIC_WITH_CC(s390_vstrczhs);
13045 INTRINSIC_WITH_CC(s390_vstrczfs);
13046
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000013047 INTRINSIC_WITH_CC(s390_vfcesbs);
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000013048 INTRINSIC_WITH_CC(s390_vfcedbs);
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000013049 INTRINSIC_WITH_CC(s390_vfchsbs);
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000013050 INTRINSIC_WITH_CC(s390_vfchdbs);
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000013051 INTRINSIC_WITH_CC(s390_vfchesbs);
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000013052 INTRINSIC_WITH_CC(s390_vfchedbs);
13053
Ulrich Weigandcac24ab2017-07-17 17:45:57 +000013054 INTRINSIC_WITH_CC(s390_vftcisb);
Ulrich Weigand5722c0f2015-05-05 19:36:42 +000013055 INTRINSIC_WITH_CC(s390_vftcidb);
13056
13057#undef INTRINSIC_WITH_CC
13058
Ulrich Weigand3a610eb2015-04-01 12:54:25 +000013059 default:
13060 return nullptr;
13061 }
13062}
Artem Belevichd21e5c62015-06-25 18:29:42 +000013063
Artem Belevich5fe85a02019-04-25 22:28:09 +000013064namespace {
13065// Helper classes for mapping MMA builtins to particular LLVM intrinsic variants.
13066struct NVPTXMmaLdstInfo {
13067 unsigned NumResults; // Number of elements to load/store
 13068 // Intrinsic IDs for row/col variants. 0 if a particular layout is unsupported.
13069 unsigned IID_col;
13070 unsigned IID_row;
13071};
13072
13073#define MMA_INTR(geom_op_type, layout) \
13074 Intrinsic::nvvm_wmma_##geom_op_type##_##layout##_stride
13075#define MMA_LDST(n, geom_op_type) \
13076 { n, MMA_INTR(geom_op_type, col), MMA_INTR(geom_op_type, row) }
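// For illustration, MMA_LDST(8, m16n16k16_load_a_f16) expands to
// {8, Intrinsic::nvvm_wmma_m16n16k16_load_a_f16_col_stride,
//  Intrinsic::nvvm_wmma_m16n16k16_load_a_f16_row_stride}.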
13077
13078static NVPTXMmaLdstInfo getNVPTXMmaLdstInfo(unsigned BuiltinID) {
13079 switch (BuiltinID) {
13080 // FP MMA loads
13081 case NVPTX::BI__hmma_m16n16k16_ld_a:
13082 return MMA_LDST(8, m16n16k16_load_a_f16);
13083 case NVPTX::BI__hmma_m16n16k16_ld_b:
13084 return MMA_LDST(8, m16n16k16_load_b_f16);
13085 case NVPTX::BI__hmma_m16n16k16_ld_c_f16:
13086 return MMA_LDST(4, m16n16k16_load_c_f16);
13087 case NVPTX::BI__hmma_m16n16k16_ld_c_f32:
13088 return MMA_LDST(8, m16n16k16_load_c_f32);
13089 case NVPTX::BI__hmma_m32n8k16_ld_a:
13090 return MMA_LDST(8, m32n8k16_load_a_f16);
13091 case NVPTX::BI__hmma_m32n8k16_ld_b:
13092 return MMA_LDST(8, m32n8k16_load_b_f16);
13093 case NVPTX::BI__hmma_m32n8k16_ld_c_f16:
13094 return MMA_LDST(4, m32n8k16_load_c_f16);
13095 case NVPTX::BI__hmma_m32n8k16_ld_c_f32:
13096 return MMA_LDST(8, m32n8k16_load_c_f32);
13097 case NVPTX::BI__hmma_m8n32k16_ld_a:
13098 return MMA_LDST(8, m8n32k16_load_a_f16);
13099 case NVPTX::BI__hmma_m8n32k16_ld_b:
13100 return MMA_LDST(8, m8n32k16_load_b_f16);
13101 case NVPTX::BI__hmma_m8n32k16_ld_c_f16:
13102 return MMA_LDST(4, m8n32k16_load_c_f16);
13103 case NVPTX::BI__hmma_m8n32k16_ld_c_f32:
13104 return MMA_LDST(8, m8n32k16_load_c_f32);
13105
13106 // Integer MMA loads
13107 case NVPTX::BI__imma_m16n16k16_ld_a_s8:
13108 return MMA_LDST(2, m16n16k16_load_a_s8);
13109 case NVPTX::BI__imma_m16n16k16_ld_a_u8:
13110 return MMA_LDST(2, m16n16k16_load_a_u8);
13111 case NVPTX::BI__imma_m16n16k16_ld_b_s8:
13112 return MMA_LDST(2, m16n16k16_load_b_s8);
13113 case NVPTX::BI__imma_m16n16k16_ld_b_u8:
13114 return MMA_LDST(2, m16n16k16_load_b_u8);
13115 case NVPTX::BI__imma_m16n16k16_ld_c:
13116 return MMA_LDST(8, m16n16k16_load_c_s32);
13117 case NVPTX::BI__imma_m32n8k16_ld_a_s8:
13118 return MMA_LDST(4, m32n8k16_load_a_s8);
13119 case NVPTX::BI__imma_m32n8k16_ld_a_u8:
13120 return MMA_LDST(4, m32n8k16_load_a_u8);
13121 case NVPTX::BI__imma_m32n8k16_ld_b_s8:
13122 return MMA_LDST(1, m32n8k16_load_b_s8);
13123 case NVPTX::BI__imma_m32n8k16_ld_b_u8:
13124 return MMA_LDST(1, m32n8k16_load_b_u8);
13125 case NVPTX::BI__imma_m32n8k16_ld_c:
13126 return MMA_LDST(8, m32n8k16_load_c_s32);
13127 case NVPTX::BI__imma_m8n32k16_ld_a_s8:
13128 return MMA_LDST(1, m8n32k16_load_a_s8);
13129 case NVPTX::BI__imma_m8n32k16_ld_a_u8:
13130 return MMA_LDST(1, m8n32k16_load_a_u8);
13131 case NVPTX::BI__imma_m8n32k16_ld_b_s8:
13132 return MMA_LDST(4, m8n32k16_load_b_s8);
13133 case NVPTX::BI__imma_m8n32k16_ld_b_u8:
13134 return MMA_LDST(4, m8n32k16_load_b_u8);
13135 case NVPTX::BI__imma_m8n32k16_ld_c:
13136 return MMA_LDST(8, m8n32k16_load_c_s32);
13137
13138 // Sub-integer MMA loads.
13139 // Only row/col layout is supported by A/B fragments.
13140 case NVPTX::BI__imma_m8n8k32_ld_a_s4:
13141 return {1, 0, MMA_INTR(m8n8k32_load_a_s4, row)};
13142 case NVPTX::BI__imma_m8n8k32_ld_a_u4:
13143 return {1, 0, MMA_INTR(m8n8k32_load_a_u4, row)};
13144 case NVPTX::BI__imma_m8n8k32_ld_b_s4:
13145 return {1, MMA_INTR(m8n8k32_load_b_s4, col), 0};
13146 case NVPTX::BI__imma_m8n8k32_ld_b_u4:
13147 return {1, MMA_INTR(m8n8k32_load_b_u4, col), 0};
13148 case NVPTX::BI__imma_m8n8k32_ld_c:
13149 return MMA_LDST(2, m8n8k32_load_c_s32);
13150 case NVPTX::BI__bmma_m8n8k128_ld_a_b1:
13151 return {1, 0, MMA_INTR(m8n8k128_load_a_b1, row)};
13152 case NVPTX::BI__bmma_m8n8k128_ld_b_b1:
13153 return {1, MMA_INTR(m8n8k128_load_b_b1, col), 0};
13154 case NVPTX::BI__bmma_m8n8k128_ld_c:
13155 return MMA_LDST(2, m8n8k128_load_c_s32);
13156
 13157 // NOTE: We need to follow the inconsistent naming scheme used by NVCC. Unlike
13158 // PTX and LLVM IR where stores always use fragment D, NVCC builtins always
13159 // use fragment C for both loads and stores.
13160 // FP MMA stores.
13161 case NVPTX::BI__hmma_m16n16k16_st_c_f16:
13162 return MMA_LDST(4, m16n16k16_store_d_f16);
13163 case NVPTX::BI__hmma_m16n16k16_st_c_f32:
13164 return MMA_LDST(8, m16n16k16_store_d_f32);
13165 case NVPTX::BI__hmma_m32n8k16_st_c_f16:
13166 return MMA_LDST(4, m32n8k16_store_d_f16);
13167 case NVPTX::BI__hmma_m32n8k16_st_c_f32:
13168 return MMA_LDST(8, m32n8k16_store_d_f32);
13169 case NVPTX::BI__hmma_m8n32k16_st_c_f16:
13170 return MMA_LDST(4, m8n32k16_store_d_f16);
13171 case NVPTX::BI__hmma_m8n32k16_st_c_f32:
13172 return MMA_LDST(8, m8n32k16_store_d_f32);
13173
13174 // Integer and sub-integer MMA stores.
13175 // Another naming quirk. Unlike other MMA builtins that use PTX types in the
13176 // name, integer loads/stores use LLVM's i32.
13177 case NVPTX::BI__imma_m16n16k16_st_c_i32:
13178 return MMA_LDST(8, m16n16k16_store_d_s32);
13179 case NVPTX::BI__imma_m32n8k16_st_c_i32:
13180 return MMA_LDST(8, m32n8k16_store_d_s32);
13181 case NVPTX::BI__imma_m8n32k16_st_c_i32:
13182 return MMA_LDST(8, m8n32k16_store_d_s32);
13183 case NVPTX::BI__imma_m8n8k32_st_c_i32:
13184 return MMA_LDST(2, m8n8k32_store_d_s32);
13185 case NVPTX::BI__bmma_m8n8k128_st_c_i32:
13186 return MMA_LDST(2, m8n8k128_store_d_s32);
13187
13188 default:
13189 llvm_unreachable("Unknown MMA builtin");
13190 }
13191}
13192#undef MMA_LDST
13193#undef MMA_INTR
13194
13195
13196struct NVPTXMmaInfo {
13197 unsigned NumEltsA;
13198 unsigned NumEltsB;
13199 unsigned NumEltsC;
13200 unsigned NumEltsD;
13201 std::array<unsigned, 8> Variants;
13202
13203 unsigned getMMAIntrinsic(int Layout, bool Satf) {
13204 unsigned Index = Layout * 2 + Satf;
13205 if (Index >= Variants.size())
13206 return 0;
13207 return Variants[Index];
13208 }
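  // Example (sketch): Layout == 1 with Satf == true gives Index == 3, which
  // under the MMA_VARIANTS ordering below is the row_col ..._satfinite
  // variant; unsupported slots hold 0.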
13209};
13210
 13211 // getMMAIntrinsic returns the intrinsic that matches Layout and Satf for
 13212 // valid combinations of Layout and Satf, and 0 otherwise.
13213static NVPTXMmaInfo getNVPTXMmaInfo(unsigned BuiltinID) {
13214 // clang-format off
13215#define MMA_VARIANTS(geom, type) {{ \
13216 Intrinsic::nvvm_wmma_##geom##_mma_row_row_##type, \
13217 Intrinsic::nvvm_wmma_##geom##_mma_row_row_##type##_satfinite, \
13218 Intrinsic::nvvm_wmma_##geom##_mma_row_col_##type, \
13219 Intrinsic::nvvm_wmma_##geom##_mma_row_col_##type##_satfinite, \
13220 Intrinsic::nvvm_wmma_##geom##_mma_col_row_##type, \
13221 Intrinsic::nvvm_wmma_##geom##_mma_col_row_##type##_satfinite, \
13222 Intrinsic::nvvm_wmma_##geom##_mma_col_col_##type, \
13223 Intrinsic::nvvm_wmma_##geom##_mma_col_col_##type##_satfinite \
13224 }}
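// For illustration, MMA_VARIANTS(m16n16k16, f16_f16) lists the row_row,
// row_col, col_row and col_col variants of
// Intrinsic::nvvm_wmma_m16n16k16_mma_*_f16_f16, each followed by its
// _satfinite counterpart.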
13225// Sub-integer MMA only supports row.col layout.
13226#define MMA_VARIANTS_I4(geom, type) {{ \
13227 0, \
13228 0, \
13229 Intrinsic::nvvm_wmma_##geom##_mma_row_col_##type, \
13230 Intrinsic::nvvm_wmma_##geom##_mma_row_col_##type##_satfinite, \
13231 0, \
13232 0, \
13233 0, \
13234 0 \
13235 }}
13236// b1 MMA does not support .satfinite.
13237#define MMA_VARIANTS_B1(geom, type) {{ \
13238 0, \
13239 0, \
13240 Intrinsic::nvvm_wmma_##geom##_mma_row_col_##type, \
13241 0, \
13242 0, \
13243 0, \
13244 0, \
13245 0 \
13246 }}
13247 // clang-format on
13248 switch (BuiltinID) {
13249 // FP MMA
 13250 // Note that the 'type' argument of the MMA_VARIANTS macros uses D_C notation,
 13251 // while the NumEltsN fields of the return value are ordered as A,B,C,D.
13252 case NVPTX::BI__hmma_m16n16k16_mma_f16f16:
13253 return {8, 8, 4, 4, MMA_VARIANTS(m16n16k16, f16_f16)};
13254 case NVPTX::BI__hmma_m16n16k16_mma_f32f16:
13255 return {8, 8, 4, 8, MMA_VARIANTS(m16n16k16, f32_f16)};
13256 case NVPTX::BI__hmma_m16n16k16_mma_f16f32:
13257 return {8, 8, 8, 4, MMA_VARIANTS(m16n16k16, f16_f32)};
13258 case NVPTX::BI__hmma_m16n16k16_mma_f32f32:
13259 return {8, 8, 8, 8, MMA_VARIANTS(m16n16k16, f32_f32)};
13260 case NVPTX::BI__hmma_m32n8k16_mma_f16f16:
13261 return {8, 8, 4, 4, MMA_VARIANTS(m32n8k16, f16_f16)};
13262 case NVPTX::BI__hmma_m32n8k16_mma_f32f16:
13263 return {8, 8, 4, 8, MMA_VARIANTS(m32n8k16, f32_f16)};
13264 case NVPTX::BI__hmma_m32n8k16_mma_f16f32:
13265 return {8, 8, 8, 4, MMA_VARIANTS(m32n8k16, f16_f32)};
13266 case NVPTX::BI__hmma_m32n8k16_mma_f32f32:
13267 return {8, 8, 8, 8, MMA_VARIANTS(m32n8k16, f32_f32)};
13268 case NVPTX::BI__hmma_m8n32k16_mma_f16f16:
13269 return {8, 8, 4, 4, MMA_VARIANTS(m8n32k16, f16_f16)};
13270 case NVPTX::BI__hmma_m8n32k16_mma_f32f16:
13271 return {8, 8, 4, 8, MMA_VARIANTS(m8n32k16, f32_f16)};
13272 case NVPTX::BI__hmma_m8n32k16_mma_f16f32:
13273 return {8, 8, 8, 4, MMA_VARIANTS(m8n32k16, f16_f32)};
13274 case NVPTX::BI__hmma_m8n32k16_mma_f32f32:
13275 return {8, 8, 8, 8, MMA_VARIANTS(m8n32k16, f32_f32)};
13276
13277 // Integer MMA
13278 case NVPTX::BI__imma_m16n16k16_mma_s8:
13279 return {2, 2, 8, 8, MMA_VARIANTS(m16n16k16, s8)};
13280 case NVPTX::BI__imma_m16n16k16_mma_u8:
13281 return {2, 2, 8, 8, MMA_VARIANTS(m16n16k16, u8)};
13282 case NVPTX::BI__imma_m32n8k16_mma_s8:
13283 return {4, 1, 8, 8, MMA_VARIANTS(m32n8k16, s8)};
13284 case NVPTX::BI__imma_m32n8k16_mma_u8:
13285 return {4, 1, 8, 8, MMA_VARIANTS(m32n8k16, u8)};
13286 case NVPTX::BI__imma_m8n32k16_mma_s8:
13287 return {1, 4, 8, 8, MMA_VARIANTS(m8n32k16, s8)};
13288 case NVPTX::BI__imma_m8n32k16_mma_u8:
13289 return {1, 4, 8, 8, MMA_VARIANTS(m8n32k16, u8)};
13290
13291 // Sub-integer MMA
13292 case NVPTX::BI__imma_m8n8k32_mma_s4:
13293 return {1, 1, 2, 2, MMA_VARIANTS_I4(m8n8k32, s4)};
13294 case NVPTX::BI__imma_m8n8k32_mma_u4:
13295 return {1, 1, 2, 2, MMA_VARIANTS_I4(m8n8k32, u4)};
13296 case NVPTX::BI__bmma_m8n8k128_mma_xor_popc_b1:
13297 return {1, 1, 2, 2, MMA_VARIANTS_B1(m8n8k128, b1)};
13298 default:
13299 llvm_unreachable("Unexpected builtin ID.");
13300 }
13301#undef MMA_VARIANTS
13302#undef MMA_VARIANTS_I4
13303#undef MMA_VARIANTS_B1
13304}
13305
13306} // namespace
13307
13308Value *
13309CodeGenFunction::EmitNVPTXBuiltinExpr(unsigned BuiltinID, const CallExpr *E) {
Justin Lebar2e4ecfd2016-05-19 22:49:13 +000013310 auto MakeLdg = [&](unsigned IntrinsicID) {
13311 Value *Ptr = EmitScalarExpr(E->getArg(0));
Justin Lebar2e4ecfd2016-05-19 22:49:13 +000013312 clang::CharUnits Align =
Krzysztof Parzyszek8f248232017-05-18 17:07:11 +000013313 getNaturalPointeeTypeAlignment(E->getArg(0)->getType());
Justin Lebar2e4ecfd2016-05-19 22:49:13 +000013314 return Builder.CreateCall(
13315 CGM.getIntrinsic(IntrinsicID, {Ptr->getType()->getPointerElementType(),
13316 Ptr->getType()}),
13317 {Ptr, ConstantInt::get(Builder.getInt32Ty(), Align.getQuantity())});
13318 };
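  // Sketch of what MakeLdg produces (not a literal IR dump): for
  // __nvvm_ldg_i on a pointer-to-int argument it emits a call to the
  // llvm.nvvm.ldg.global.i intrinsic overloaded on the pointee and pointer
  // types, passing the pointee's natural alignment as the second operand.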
Artem Belevichfda99052016-09-28 17:47:35 +000013319 auto MakeScopedAtomic = [&](unsigned IntrinsicID) {
13320 Value *Ptr = EmitScalarExpr(E->getArg(0));
13321 return Builder.CreateCall(
13322 CGM.getIntrinsic(IntrinsicID, {Ptr->getType()->getPointerElementType(),
13323 Ptr->getType()}),
13324 {Ptr, EmitScalarExpr(E->getArg(1))});
13325 };
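  // Similarly, MakeScopedAtomic (sketch): for __nvvm_atom_cta_add_gen_i it
  // emits a call to the llvm.nvvm.atomic.add.gen.i.cta intrinsic overloaded
  // on the pointee and pointer types, passing the pointer and value operands.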
Artem Belevichd21e5c62015-06-25 18:29:42 +000013326 switch (BuiltinID) {
13327 case NVPTX::BI__nvvm_atom_add_gen_i:
13328 case NVPTX::BI__nvvm_atom_add_gen_l:
13329 case NVPTX::BI__nvvm_atom_add_gen_ll:
13330 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Add, E);
13331
13332 case NVPTX::BI__nvvm_atom_sub_gen_i:
13333 case NVPTX::BI__nvvm_atom_sub_gen_l:
13334 case NVPTX::BI__nvvm_atom_sub_gen_ll:
13335 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Sub, E);
13336
13337 case NVPTX::BI__nvvm_atom_and_gen_i:
13338 case NVPTX::BI__nvvm_atom_and_gen_l:
13339 case NVPTX::BI__nvvm_atom_and_gen_ll:
13340 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::And, E);
13341
13342 case NVPTX::BI__nvvm_atom_or_gen_i:
13343 case NVPTX::BI__nvvm_atom_or_gen_l:
13344 case NVPTX::BI__nvvm_atom_or_gen_ll:
13345 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Or, E);
13346
13347 case NVPTX::BI__nvvm_atom_xor_gen_i:
13348 case NVPTX::BI__nvvm_atom_xor_gen_l:
13349 case NVPTX::BI__nvvm_atom_xor_gen_ll:
13350 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Xor, E);
13351
13352 case NVPTX::BI__nvvm_atom_xchg_gen_i:
13353 case NVPTX::BI__nvvm_atom_xchg_gen_l:
13354 case NVPTX::BI__nvvm_atom_xchg_gen_ll:
13355 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Xchg, E);
13356
13357 case NVPTX::BI__nvvm_atom_max_gen_i:
13358 case NVPTX::BI__nvvm_atom_max_gen_l:
13359 case NVPTX::BI__nvvm_atom_max_gen_ll:
Jingyue Wu2d69f962015-08-31 17:25:51 +000013360 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Max, E);
13361
Artem Belevichd21e5c62015-06-25 18:29:42 +000013362 case NVPTX::BI__nvvm_atom_max_gen_ui:
13363 case NVPTX::BI__nvvm_atom_max_gen_ul:
13364 case NVPTX::BI__nvvm_atom_max_gen_ull:
Jingyue Wu2d69f962015-08-31 17:25:51 +000013365 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::UMax, E);
Artem Belevichd21e5c62015-06-25 18:29:42 +000013366
13367 case NVPTX::BI__nvvm_atom_min_gen_i:
13368 case NVPTX::BI__nvvm_atom_min_gen_l:
13369 case NVPTX::BI__nvvm_atom_min_gen_ll:
Jingyue Wu2d69f962015-08-31 17:25:51 +000013370 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::Min, E);
13371
Artem Belevichd21e5c62015-06-25 18:29:42 +000013372 case NVPTX::BI__nvvm_atom_min_gen_ui:
13373 case NVPTX::BI__nvvm_atom_min_gen_ul:
13374 case NVPTX::BI__nvvm_atom_min_gen_ull:
Jingyue Wu2d69f962015-08-31 17:25:51 +000013375 return MakeBinaryAtomicValue(*this, llvm::AtomicRMWInst::UMin, E);
Artem Belevichd21e5c62015-06-25 18:29:42 +000013376
13377 case NVPTX::BI__nvvm_atom_cas_gen_i:
13378 case NVPTX::BI__nvvm_atom_cas_gen_l:
13379 case NVPTX::BI__nvvm_atom_cas_gen_ll:
Jingyue Wuf1eca252015-09-30 21:49:32 +000013380 // __nvvm_atom_cas_gen_* should return the old value rather than the
13381 // success flag.
13382 return MakeAtomicCmpXchgValue(*this, E, /*ReturnBool=*/false);
Artem Belevichd21e5c62015-06-25 18:29:42 +000013383
13384 case NVPTX::BI__nvvm_atom_add_gen_f: {
13385 Value *Ptr = EmitScalarExpr(E->getArg(0));
13386 Value *Val = EmitScalarExpr(E->getArg(1));
 13387 // atomicrmw only deals with integer arguments, so we need to use
13388 // LLVM's nvvm_atomic_load_add_f32 intrinsic for that.
James Y Knight8799cae2019-02-03 21:53:49 +000013389 Function *FnALAF32 =
Artem Belevichd21e5c62015-06-25 18:29:42 +000013390 CGM.getIntrinsic(Intrinsic::nvvm_atomic_load_add_f32, Ptr->getType());
13391 return Builder.CreateCall(FnALAF32, {Ptr, Val});
13392 }
13393
Justin Lebarda9e0bd2017-11-07 22:10:54 +000013394 case NVPTX::BI__nvvm_atom_add_gen_d: {
13395 Value *Ptr = EmitScalarExpr(E->getArg(0));
13396 Value *Val = EmitScalarExpr(E->getArg(1));
13397 // atomicrmw only deals with integer arguments, so we need to use
13398 // LLVM's nvvm_atomic_load_add_f64 intrinsic.
James Y Knight8799cae2019-02-03 21:53:49 +000013399 Function *FnALAF64 =
Justin Lebarda9e0bd2017-11-07 22:10:54 +000013400 CGM.getIntrinsic(Intrinsic::nvvm_atomic_load_add_f64, Ptr->getType());
13401 return Builder.CreateCall(FnALAF64, {Ptr, Val});
13402 }
13403
Justin Lebar717d2b02016-03-22 00:09:28 +000013404 case NVPTX::BI__nvvm_atom_inc_gen_ui: {
13405 Value *Ptr = EmitScalarExpr(E->getArg(0));
13406 Value *Val = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013407 Function *FnALI32 =
Justin Lebar717d2b02016-03-22 00:09:28 +000013408 CGM.getIntrinsic(Intrinsic::nvvm_atomic_load_inc_32, Ptr->getType());
13409 return Builder.CreateCall(FnALI32, {Ptr, Val});
13410 }
13411
13412 case NVPTX::BI__nvvm_atom_dec_gen_ui: {
13413 Value *Ptr = EmitScalarExpr(E->getArg(0));
13414 Value *Val = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013415 Function *FnALD32 =
Justin Lebar717d2b02016-03-22 00:09:28 +000013416 CGM.getIntrinsic(Intrinsic::nvvm_atomic_load_dec_32, Ptr->getType());
13417 return Builder.CreateCall(FnALD32, {Ptr, Val});
13418 }
13419
Justin Lebar2e4ecfd2016-05-19 22:49:13 +000013420 case NVPTX::BI__nvvm_ldg_c:
13421 case NVPTX::BI__nvvm_ldg_c2:
13422 case NVPTX::BI__nvvm_ldg_c4:
13423 case NVPTX::BI__nvvm_ldg_s:
13424 case NVPTX::BI__nvvm_ldg_s2:
13425 case NVPTX::BI__nvvm_ldg_s4:
13426 case NVPTX::BI__nvvm_ldg_i:
13427 case NVPTX::BI__nvvm_ldg_i2:
13428 case NVPTX::BI__nvvm_ldg_i4:
13429 case NVPTX::BI__nvvm_ldg_l:
13430 case NVPTX::BI__nvvm_ldg_ll:
13431 case NVPTX::BI__nvvm_ldg_ll2:
13432 case NVPTX::BI__nvvm_ldg_uc:
13433 case NVPTX::BI__nvvm_ldg_uc2:
13434 case NVPTX::BI__nvvm_ldg_uc4:
13435 case NVPTX::BI__nvvm_ldg_us:
13436 case NVPTX::BI__nvvm_ldg_us2:
13437 case NVPTX::BI__nvvm_ldg_us4:
13438 case NVPTX::BI__nvvm_ldg_ui:
13439 case NVPTX::BI__nvvm_ldg_ui2:
13440 case NVPTX::BI__nvvm_ldg_ui4:
13441 case NVPTX::BI__nvvm_ldg_ul:
13442 case NVPTX::BI__nvvm_ldg_ull:
13443 case NVPTX::BI__nvvm_ldg_ull2:
13444 // PTX Interoperability section 2.2: "For a vector with an even number of
13445 // elements, its alignment is set to number of elements times the alignment
13446 // of its member: n*alignof(t)."
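    // For example (assuming a 4-byte int), the rule above gives an
    // __nvvm_ldg_i2 load an alignment operand of 2 * 4 == 8 bytes.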
13447 return MakeLdg(Intrinsic::nvvm_ldg_global_i);
13448 case NVPTX::BI__nvvm_ldg_f:
13449 case NVPTX::BI__nvvm_ldg_f2:
13450 case NVPTX::BI__nvvm_ldg_f4:
13451 case NVPTX::BI__nvvm_ldg_d:
13452 case NVPTX::BI__nvvm_ldg_d2:
13453 return MakeLdg(Intrinsic::nvvm_ldg_global_f);
Artem Belevichfda99052016-09-28 17:47:35 +000013454
13455 case NVPTX::BI__nvvm_atom_cta_add_gen_i:
13456 case NVPTX::BI__nvvm_atom_cta_add_gen_l:
13457 case NVPTX::BI__nvvm_atom_cta_add_gen_ll:
13458 return MakeScopedAtomic(Intrinsic::nvvm_atomic_add_gen_i_cta);
13459 case NVPTX::BI__nvvm_atom_sys_add_gen_i:
13460 case NVPTX::BI__nvvm_atom_sys_add_gen_l:
13461 case NVPTX::BI__nvvm_atom_sys_add_gen_ll:
13462 return MakeScopedAtomic(Intrinsic::nvvm_atomic_add_gen_i_sys);
13463 case NVPTX::BI__nvvm_atom_cta_add_gen_f:
13464 case NVPTX::BI__nvvm_atom_cta_add_gen_d:
13465 return MakeScopedAtomic(Intrinsic::nvvm_atomic_add_gen_f_cta);
13466 case NVPTX::BI__nvvm_atom_sys_add_gen_f:
13467 case NVPTX::BI__nvvm_atom_sys_add_gen_d:
13468 return MakeScopedAtomic(Intrinsic::nvvm_atomic_add_gen_f_sys);
13469 case NVPTX::BI__nvvm_atom_cta_xchg_gen_i:
13470 case NVPTX::BI__nvvm_atom_cta_xchg_gen_l:
13471 case NVPTX::BI__nvvm_atom_cta_xchg_gen_ll:
13472 return MakeScopedAtomic(Intrinsic::nvvm_atomic_exch_gen_i_cta);
13473 case NVPTX::BI__nvvm_atom_sys_xchg_gen_i:
13474 case NVPTX::BI__nvvm_atom_sys_xchg_gen_l:
13475 case NVPTX::BI__nvvm_atom_sys_xchg_gen_ll:
13476 return MakeScopedAtomic(Intrinsic::nvvm_atomic_exch_gen_i_sys);
13477 case NVPTX::BI__nvvm_atom_cta_max_gen_i:
13478 case NVPTX::BI__nvvm_atom_cta_max_gen_ui:
13479 case NVPTX::BI__nvvm_atom_cta_max_gen_l:
13480 case NVPTX::BI__nvvm_atom_cta_max_gen_ul:
13481 case NVPTX::BI__nvvm_atom_cta_max_gen_ll:
13482 case NVPTX::BI__nvvm_atom_cta_max_gen_ull:
13483 return MakeScopedAtomic(Intrinsic::nvvm_atomic_max_gen_i_cta);
13484 case NVPTX::BI__nvvm_atom_sys_max_gen_i:
13485 case NVPTX::BI__nvvm_atom_sys_max_gen_ui:
13486 case NVPTX::BI__nvvm_atom_sys_max_gen_l:
13487 case NVPTX::BI__nvvm_atom_sys_max_gen_ul:
13488 case NVPTX::BI__nvvm_atom_sys_max_gen_ll:
13489 case NVPTX::BI__nvvm_atom_sys_max_gen_ull:
13490 return MakeScopedAtomic(Intrinsic::nvvm_atomic_max_gen_i_sys);
13491 case NVPTX::BI__nvvm_atom_cta_min_gen_i:
13492 case NVPTX::BI__nvvm_atom_cta_min_gen_ui:
13493 case NVPTX::BI__nvvm_atom_cta_min_gen_l:
13494 case NVPTX::BI__nvvm_atom_cta_min_gen_ul:
13495 case NVPTX::BI__nvvm_atom_cta_min_gen_ll:
13496 case NVPTX::BI__nvvm_atom_cta_min_gen_ull:
13497 return MakeScopedAtomic(Intrinsic::nvvm_atomic_min_gen_i_cta);
13498 case NVPTX::BI__nvvm_atom_sys_min_gen_i:
13499 case NVPTX::BI__nvvm_atom_sys_min_gen_ui:
13500 case NVPTX::BI__nvvm_atom_sys_min_gen_l:
13501 case NVPTX::BI__nvvm_atom_sys_min_gen_ul:
13502 case NVPTX::BI__nvvm_atom_sys_min_gen_ll:
13503 case NVPTX::BI__nvvm_atom_sys_min_gen_ull:
13504 return MakeScopedAtomic(Intrinsic::nvvm_atomic_min_gen_i_sys);
13505 case NVPTX::BI__nvvm_atom_cta_inc_gen_ui:
13506 return MakeScopedAtomic(Intrinsic::nvvm_atomic_inc_gen_i_cta);
13507 case NVPTX::BI__nvvm_atom_cta_dec_gen_ui:
13508 return MakeScopedAtomic(Intrinsic::nvvm_atomic_dec_gen_i_cta);
13509 case NVPTX::BI__nvvm_atom_sys_inc_gen_ui:
13510 return MakeScopedAtomic(Intrinsic::nvvm_atomic_inc_gen_i_sys);
13511 case NVPTX::BI__nvvm_atom_sys_dec_gen_ui:
13512 return MakeScopedAtomic(Intrinsic::nvvm_atomic_dec_gen_i_sys);
13513 case NVPTX::BI__nvvm_atom_cta_and_gen_i:
13514 case NVPTX::BI__nvvm_atom_cta_and_gen_l:
13515 case NVPTX::BI__nvvm_atom_cta_and_gen_ll:
13516 return MakeScopedAtomic(Intrinsic::nvvm_atomic_and_gen_i_cta);
13517 case NVPTX::BI__nvvm_atom_sys_and_gen_i:
13518 case NVPTX::BI__nvvm_atom_sys_and_gen_l:
13519 case NVPTX::BI__nvvm_atom_sys_and_gen_ll:
13520 return MakeScopedAtomic(Intrinsic::nvvm_atomic_and_gen_i_sys);
13521 case NVPTX::BI__nvvm_atom_cta_or_gen_i:
13522 case NVPTX::BI__nvvm_atom_cta_or_gen_l:
13523 case NVPTX::BI__nvvm_atom_cta_or_gen_ll:
13524 return MakeScopedAtomic(Intrinsic::nvvm_atomic_or_gen_i_cta);
13525 case NVPTX::BI__nvvm_atom_sys_or_gen_i:
13526 case NVPTX::BI__nvvm_atom_sys_or_gen_l:
13527 case NVPTX::BI__nvvm_atom_sys_or_gen_ll:
13528 return MakeScopedAtomic(Intrinsic::nvvm_atomic_or_gen_i_sys);
13529 case NVPTX::BI__nvvm_atom_cta_xor_gen_i:
13530 case NVPTX::BI__nvvm_atom_cta_xor_gen_l:
13531 case NVPTX::BI__nvvm_atom_cta_xor_gen_ll:
13532 return MakeScopedAtomic(Intrinsic::nvvm_atomic_xor_gen_i_cta);
13533 case NVPTX::BI__nvvm_atom_sys_xor_gen_i:
13534 case NVPTX::BI__nvvm_atom_sys_xor_gen_l:
13535 case NVPTX::BI__nvvm_atom_sys_xor_gen_ll:
13536 return MakeScopedAtomic(Intrinsic::nvvm_atomic_xor_gen_i_sys);
13537 case NVPTX::BI__nvvm_atom_cta_cas_gen_i:
13538 case NVPTX::BI__nvvm_atom_cta_cas_gen_l:
13539 case NVPTX::BI__nvvm_atom_cta_cas_gen_ll: {
13540 Value *Ptr = EmitScalarExpr(E->getArg(0));
13541 return Builder.CreateCall(
13542 CGM.getIntrinsic(
13543 Intrinsic::nvvm_atomic_cas_gen_i_cta,
13544 {Ptr->getType()->getPointerElementType(), Ptr->getType()}),
13545 {Ptr, EmitScalarExpr(E->getArg(1)), EmitScalarExpr(E->getArg(2))});
13546 }
13547 case NVPTX::BI__nvvm_atom_sys_cas_gen_i:
13548 case NVPTX::BI__nvvm_atom_sys_cas_gen_l:
13549 case NVPTX::BI__nvvm_atom_sys_cas_gen_ll: {
13550 Value *Ptr = EmitScalarExpr(E->getArg(0));
13551 return Builder.CreateCall(
13552 CGM.getIntrinsic(
13553 Intrinsic::nvvm_atomic_cas_gen_i_sys,
13554 {Ptr->getType()->getPointerElementType(), Ptr->getType()}),
13555 {Ptr, EmitScalarExpr(E->getArg(1)), EmitScalarExpr(E->getArg(2))});
13556 }
Artem Belevichbab95c72017-09-26 17:07:23 +000013557 case NVPTX::BI__nvvm_match_all_sync_i32p:
13558 case NVPTX::BI__nvvm_match_all_sync_i64p: {
13559 Value *Mask = EmitScalarExpr(E->getArg(0));
13560 Value *Val = EmitScalarExpr(E->getArg(1));
13561 Address PredOutPtr = EmitPointerWithAlignment(E->getArg(2));
13562 Value *ResultPair = Builder.CreateCall(
13563 CGM.getIntrinsic(BuiltinID == NVPTX::BI__nvvm_match_all_sync_i32p
13564 ? Intrinsic::nvvm_match_all_sync_i32p
13565 : Intrinsic::nvvm_match_all_sync_i64p),
13566 {Mask, Val});
13567 Value *Pred = Builder.CreateZExt(Builder.CreateExtractValue(ResultPair, 1),
13568 PredOutPtr.getElementType());
13569 Builder.CreateStore(Pred, PredOutPtr);
13570 return Builder.CreateExtractValue(ResultPair, 0);
13571 }
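  // A minimal usage sketch for the match.all builtins handled above (variable
  // names are illustrative, not from this file): element 0 of the intrinsic's
  // result is the mask of matching lanes and element 1 is the "all matched"
  // predicate, which is widened and stored through the third argument.
  //   int pred;
  //   unsigned mask = __nvvm_match_all_sync_i32p(activemask, val, &pred);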
Artem Belevich5fe85a02019-04-25 22:28:09 +000013572
13573 // FP MMA loads.
Artem Belevich91cc00b2017-10-12 21:32:19 +000013574 case NVPTX::BI__hmma_m16n16k16_ld_a:
13575 case NVPTX::BI__hmma_m16n16k16_ld_b:
13576 case NVPTX::BI__hmma_m16n16k16_ld_c_f16:
Artem Belevich0ae85902018-04-18 21:51:48 +000013577 case NVPTX::BI__hmma_m16n16k16_ld_c_f32:
13578 case NVPTX::BI__hmma_m32n8k16_ld_a:
13579 case NVPTX::BI__hmma_m32n8k16_ld_b:
13580 case NVPTX::BI__hmma_m32n8k16_ld_c_f16:
13581 case NVPTX::BI__hmma_m32n8k16_ld_c_f32:
13582 case NVPTX::BI__hmma_m8n32k16_ld_a:
13583 case NVPTX::BI__hmma_m8n32k16_ld_b:
13584 case NVPTX::BI__hmma_m8n32k16_ld_c_f16:
Artem Belevich5fe85a02019-04-25 22:28:09 +000013585 case NVPTX::BI__hmma_m8n32k16_ld_c_f32:
13586 // Integer MMA loads.
13587 case NVPTX::BI__imma_m16n16k16_ld_a_s8:
13588 case NVPTX::BI__imma_m16n16k16_ld_a_u8:
13589 case NVPTX::BI__imma_m16n16k16_ld_b_s8:
13590 case NVPTX::BI__imma_m16n16k16_ld_b_u8:
13591 case NVPTX::BI__imma_m16n16k16_ld_c:
13592 case NVPTX::BI__imma_m32n8k16_ld_a_s8:
13593 case NVPTX::BI__imma_m32n8k16_ld_a_u8:
13594 case NVPTX::BI__imma_m32n8k16_ld_b_s8:
13595 case NVPTX::BI__imma_m32n8k16_ld_b_u8:
13596 case NVPTX::BI__imma_m32n8k16_ld_c:
13597 case NVPTX::BI__imma_m8n32k16_ld_a_s8:
13598 case NVPTX::BI__imma_m8n32k16_ld_a_u8:
13599 case NVPTX::BI__imma_m8n32k16_ld_b_s8:
13600 case NVPTX::BI__imma_m8n32k16_ld_b_u8:
13601 case NVPTX::BI__imma_m8n32k16_ld_c:
13602 // Sub-integer MMA loads.
13603 case NVPTX::BI__imma_m8n8k32_ld_a_s4:
13604 case NVPTX::BI__imma_m8n8k32_ld_a_u4:
13605 case NVPTX::BI__imma_m8n8k32_ld_b_s4:
13606 case NVPTX::BI__imma_m8n8k32_ld_b_u4:
13607 case NVPTX::BI__imma_m8n8k32_ld_c:
13608 case NVPTX::BI__bmma_m8n8k128_ld_a_b1:
13609 case NVPTX::BI__bmma_m8n8k128_ld_b_b1:
13610 case NVPTX::BI__bmma_m8n8k128_ld_c:
13611 {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013612 Address Dst = EmitPointerWithAlignment(E->getArg(0));
13613 Value *Src = EmitScalarExpr(E->getArg(1));
13614 Value *Ldm = EmitScalarExpr(E->getArg(2));
13615 llvm::APSInt isColMajorArg;
13616 if (!E->getArg(3)->isIntegerConstantExpr(isColMajorArg, getContext()))
13617 return nullptr;
13618 bool isColMajor = isColMajorArg.getSExtValue();
Artem Belevich5fe85a02019-04-25 22:28:09 +000013619 NVPTXMmaLdstInfo II = getNVPTXMmaLdstInfo(BuiltinID);
13620 unsigned IID = isColMajor ? II.IID_col : II.IID_row;
13621 if (IID == 0)
13622 return nullptr;
13623
Artem Belevich91cc00b2017-10-12 21:32:19 +000013624 Value *Result =
Artem Belevich914d4ba2018-03-20 17:18:59 +000013625 Builder.CreateCall(CGM.getIntrinsic(IID, Src->getType()), {Src, Ldm});
Artem Belevich91cc00b2017-10-12 21:32:19 +000013626
13627 // Save returned values.
Artem Belevich5fe85a02019-04-25 22:28:09 +000013628 assert(II.NumResults);
13629 if (II.NumResults == 1) {
13630 Builder.CreateAlignedStore(Result, Dst.getPointer(),
13631 CharUnits::fromQuantity(4));
13632 } else {
13633 for (unsigned i = 0; i < II.NumResults; ++i) {
13634 Builder.CreateAlignedStore(
13635 Builder.CreateBitCast(Builder.CreateExtractValue(Result, i),
13636 Dst.getElementType()),
13637 Builder.CreateGEP(Dst.getPointer(),
13638 llvm::ConstantInt::get(IntTy, i)),
13639 CharUnits::fromQuantity(4));
13640 }
Artem Belevich91cc00b2017-10-12 21:32:19 +000013641 }
13642 return Result;
13643 }
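  // Note on the MMA load path above: the builtin's first argument is the
  // destination fragment array, so the intrinsic's result (a scalar when
  // NumResults == 1, otherwise an aggregate) is written back element by
  // element with 4-byte alignment before also being returned.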
13644
13645 case NVPTX::BI__hmma_m16n16k16_st_c_f16:
Artem Belevich0ae85902018-04-18 21:51:48 +000013646 case NVPTX::BI__hmma_m16n16k16_st_c_f32:
13647 case NVPTX::BI__hmma_m32n8k16_st_c_f16:
13648 case NVPTX::BI__hmma_m32n8k16_st_c_f32:
13649 case NVPTX::BI__hmma_m8n32k16_st_c_f16:
Artem Belevich5fe85a02019-04-25 22:28:09 +000013650 case NVPTX::BI__hmma_m8n32k16_st_c_f32:
13651 case NVPTX::BI__imma_m16n16k16_st_c_i32:
13652 case NVPTX::BI__imma_m32n8k16_st_c_i32:
13653 case NVPTX::BI__imma_m8n32k16_st_c_i32:
13654 case NVPTX::BI__imma_m8n8k32_st_c_i32:
13655 case NVPTX::BI__bmma_m8n8k128_st_c_i32: {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013656 Value *Dst = EmitScalarExpr(E->getArg(0));
13657 Address Src = EmitPointerWithAlignment(E->getArg(1));
13658 Value *Ldm = EmitScalarExpr(E->getArg(2));
13659 llvm::APSInt isColMajorArg;
13660 if (!E->getArg(3)->isIntegerConstantExpr(isColMajorArg, getContext()))
13661 return nullptr;
13662 bool isColMajor = isColMajorArg.getSExtValue();
Artem Belevich5fe85a02019-04-25 22:28:09 +000013663 NVPTXMmaLdstInfo II = getNVPTXMmaLdstInfo(BuiltinID);
13664 unsigned IID = isColMajor ? II.IID_col : II.IID_row;
13665 if (IID == 0)
13666 return nullptr;
13667 Function *Intrinsic =
13668 CGM.getIntrinsic(IID, Dst->getType());
Artem Belevich91cc00b2017-10-12 21:32:19 +000013669 llvm::Type *ParamType = Intrinsic->getFunctionType()->getParamType(1);
Artem Belevich914d4ba2018-03-20 17:18:59 +000013670 SmallVector<Value *, 10> Values = {Dst};
Artem Belevich5fe85a02019-04-25 22:28:09 +000013671 for (unsigned i = 0; i < II.NumResults; ++i) {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013672 Value *V = Builder.CreateAlignedLoad(
13673 Builder.CreateGEP(Src.getPointer(), llvm::ConstantInt::get(IntTy, i)),
13674 CharUnits::fromQuantity(4));
13675 Values.push_back(Builder.CreateBitCast(V, ParamType));
13676 }
13677 Values.push_back(Ldm);
13678 Value *Result = Builder.CreateCall(Intrinsic, Values);
13679 return Result;
13680 }
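  // Note on the MMA store path above: the intrinsic's operands are assembled
  // as {Dst, fragment 0 .. NumResults-1, Ldm}; each fragment is loaded from
  // the source array with 4-byte alignment and bitcast to the intrinsic's
  // second parameter type before the call.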
13681
Artem Belevich30512862018-03-21 21:55:02 +000013682 // BI__hmma_m16n16k16_mma_<Dtype><CType>(d, a, b, c, layout, satf) -->
13683 // Intrinsic::nvvm_wmma_m16n16k16_mma_sync<layout A,B><DType><CType><Satf>
Artem Belevich91cc00b2017-10-12 21:32:19 +000013684 case NVPTX::BI__hmma_m16n16k16_mma_f16f16:
13685 case NVPTX::BI__hmma_m16n16k16_mma_f32f16:
13686 case NVPTX::BI__hmma_m16n16k16_mma_f32f32:
Artem Belevich0ae85902018-04-18 21:51:48 +000013687 case NVPTX::BI__hmma_m16n16k16_mma_f16f32:
13688 case NVPTX::BI__hmma_m32n8k16_mma_f16f16:
13689 case NVPTX::BI__hmma_m32n8k16_mma_f32f16:
13690 case NVPTX::BI__hmma_m32n8k16_mma_f32f32:
13691 case NVPTX::BI__hmma_m32n8k16_mma_f16f32:
13692 case NVPTX::BI__hmma_m8n32k16_mma_f16f16:
13693 case NVPTX::BI__hmma_m8n32k16_mma_f32f16:
13694 case NVPTX::BI__hmma_m8n32k16_mma_f32f32:
Artem Belevich5fe85a02019-04-25 22:28:09 +000013695 case NVPTX::BI__hmma_m8n32k16_mma_f16f32:
13696 case NVPTX::BI__imma_m16n16k16_mma_s8:
13697 case NVPTX::BI__imma_m16n16k16_mma_u8:
13698 case NVPTX::BI__imma_m32n8k16_mma_s8:
13699 case NVPTX::BI__imma_m32n8k16_mma_u8:
13700 case NVPTX::BI__imma_m8n32k16_mma_s8:
13701 case NVPTX::BI__imma_m8n32k16_mma_u8:
13702 case NVPTX::BI__imma_m8n8k32_mma_s4:
13703 case NVPTX::BI__imma_m8n8k32_mma_u4:
13704 case NVPTX::BI__bmma_m8n8k128_mma_xor_popc_b1: {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013705 Address Dst = EmitPointerWithAlignment(E->getArg(0));
13706 Address SrcA = EmitPointerWithAlignment(E->getArg(1));
13707 Address SrcB = EmitPointerWithAlignment(E->getArg(2));
13708 Address SrcC = EmitPointerWithAlignment(E->getArg(3));
13709 llvm::APSInt LayoutArg;
13710 if (!E->getArg(4)->isIntegerConstantExpr(LayoutArg, getContext()))
13711 return nullptr;
13712 int Layout = LayoutArg.getSExtValue();
13713 if (Layout < 0 || Layout > 3)
13714 return nullptr;
13715 llvm::APSInt SatfArg;
Artem Belevich5fe85a02019-04-25 22:28:09 +000013716 if (BuiltinID == NVPTX::BI__bmma_m8n8k128_mma_xor_popc_b1)
13717 SatfArg = 0; // .b1 does not have a satf argument.
13718 else if (!E->getArg(5)->isIntegerConstantExpr(SatfArg, getContext()))
Artem Belevich91cc00b2017-10-12 21:32:19 +000013719 return nullptr;
13720 bool Satf = SatfArg.getSExtValue();
Artem Belevich5fe85a02019-04-25 22:28:09 +000013721 NVPTXMmaInfo MI = getNVPTXMmaInfo(BuiltinID);
13722 unsigned IID = MI.getMMAIntrinsic(Layout, Satf);
13723 if (IID == 0) // Unsupported combination of Layout/Satf.
13724 return nullptr;
Artem Belevich91cc00b2017-10-12 21:32:19 +000013725
13726 SmallVector<Value *, 24> Values;
13727 Function *Intrinsic = CGM.getIntrinsic(IID);
Artem Belevich5fe85a02019-04-25 22:28:09 +000013728 llvm::Type *AType = Intrinsic->getFunctionType()->getParamType(0);
Artem Belevich91cc00b2017-10-12 21:32:19 +000013729 // Load A
Artem Belevich5fe85a02019-04-25 22:28:09 +000013730 for (unsigned i = 0; i < MI.NumEltsA; ++i) {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013731 Value *V = Builder.CreateAlignedLoad(
13732 Builder.CreateGEP(SrcA.getPointer(),
13733 llvm::ConstantInt::get(IntTy, i)),
13734 CharUnits::fromQuantity(4));
Artem Belevich5fe85a02019-04-25 22:28:09 +000013735 Values.push_back(Builder.CreateBitCast(V, AType));
Artem Belevich91cc00b2017-10-12 21:32:19 +000013736 }
13737 // Load B
Artem Belevich5fe85a02019-04-25 22:28:09 +000013738 llvm::Type *BType = Intrinsic->getFunctionType()->getParamType(MI.NumEltsA);
13739 for (unsigned i = 0; i < MI.NumEltsB; ++i) {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013740 Value *V = Builder.CreateAlignedLoad(
13741 Builder.CreateGEP(SrcB.getPointer(),
13742 llvm::ConstantInt::get(IntTy, i)),
13743 CharUnits::fromQuantity(4));
Artem Belevich5fe85a02019-04-25 22:28:09 +000013744 Values.push_back(Builder.CreateBitCast(V, BType));
Artem Belevich91cc00b2017-10-12 21:32:19 +000013745 }
13746 // Load C
Artem Belevich5fe85a02019-04-25 22:28:09 +000013747 llvm::Type *CType =
13748 Intrinsic->getFunctionType()->getParamType(MI.NumEltsA + MI.NumEltsB);
13749 for (unsigned i = 0; i < MI.NumEltsC; ++i) {
Artem Belevich91cc00b2017-10-12 21:32:19 +000013750 Value *V = Builder.CreateAlignedLoad(
13751 Builder.CreateGEP(SrcC.getPointer(),
13752 llvm::ConstantInt::get(IntTy, i)),
13753 CharUnits::fromQuantity(4));
13754 Values.push_back(Builder.CreateBitCast(V, CType));
13755 }
13756 Value *Result = Builder.CreateCall(Intrinsic, Values);
13757 llvm::Type *DType = Dst.getElementType();
Artem Belevich5fe85a02019-04-25 22:28:09 +000013758 for (unsigned i = 0; i < MI.NumEltsD; ++i)
Artem Belevich91cc00b2017-10-12 21:32:19 +000013759 Builder.CreateAlignedStore(
13760 Builder.CreateBitCast(Builder.CreateExtractValue(Result, i), DType),
13761 Builder.CreateGEP(Dst.getPointer(), llvm::ConstantInt::get(IntTy, i)),
13762 CharUnits::fromQuantity(4));
13763 return Result;
13764 }
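  // A sketch of the expected source-level form of the MMA builtins handled
  // above (argument names are illustrative, not from this file):
  //   __hmma_m16n16k16_mma_f32f16(d, a, b, c, /*layout=*/0, /*satf=*/0);
  // layout must be an integer constant in [0, 3] selecting the row/col-major
  // variants of A and B, and satf selects the saturating variant; for
  // unsupported combinations getMMAIntrinsic returns 0 and nullptr is
  // returned, so the builtin is not lowered.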
Artem Belevichd21e5c62015-06-25 18:29:42 +000013765 default:
13766 return nullptr;
13767 }
13768}
Dan Gohmanc2853072015-09-03 22:51:53 +000013769
13770Value *CodeGenFunction::EmitWebAssemblyBuiltinExpr(unsigned BuiltinID,
13771 const CallExpr *E) {
13772 switch (BuiltinID) {
Dan Gohman9f8ee032018-06-01 00:05:51 +000013773 case WebAssembly::BI__builtin_wasm_memory_size: {
13774 llvm::Type *ResultType = ConvertType(E->getType());
13775 Value *I = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000013776 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_memory_size, ResultType);
Dan Gohman9f8ee032018-06-01 00:05:51 +000013777 return Builder.CreateCall(Callee, I);
13778 }
13779 case WebAssembly::BI__builtin_wasm_memory_grow: {
13780 llvm::Type *ResultType = ConvertType(E->getType());
13781 Value *Args[] = {
13782 EmitScalarExpr(E->getArg(0)),
13783 EmitScalarExpr(E->getArg(1))
13784 };
James Y Knight8799cae2019-02-03 21:53:49 +000013785 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_memory_grow, ResultType);
Dan Gohman9f8ee032018-06-01 00:05:51 +000013786 return Builder.CreateCall(Callee, Args);
13787 }
Thomas Livelyde7a0a12019-02-13 22:11:16 +000013788 case WebAssembly::BI__builtin_wasm_memory_init: {
13789 llvm::APSInt SegConst;
13790 if (!E->getArg(0)->isIntegerConstantExpr(SegConst, getContext()))
13791 llvm_unreachable("Constant arg isn't actually constant?");
13792 llvm::APSInt MemConst;
13793 if (!E->getArg(1)->isIntegerConstantExpr(MemConst, getContext()))
13794 llvm_unreachable("Constant arg isn't actually constant?");
13795 if (!MemConst.isNullValue())
13796 ErrorUnsupported(E, "non-zero memory index");
13797 Value *Args[] = {llvm::ConstantInt::get(getLLVMContext(), SegConst),
13798 llvm::ConstantInt::get(getLLVMContext(), MemConst),
13799 EmitScalarExpr(E->getArg(2)), EmitScalarExpr(E->getArg(3)),
13800 EmitScalarExpr(E->getArg(4))};
13801 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_memory_init);
13802 return Builder.CreateCall(Callee, Args);
13803 }
13804 case WebAssembly::BI__builtin_wasm_data_drop: {
13805 llvm::APSInt SegConst;
13806 if (!E->getArg(0)->isIntegerConstantExpr(SegConst, getContext()))
13807 llvm_unreachable("Constant arg isn't actually constant?");
13808 Value *Arg = llvm::ConstantInt::get(getLLVMContext(), SegConst);
13809 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_data_drop);
13810 return Builder.CreateCall(Callee, {Arg});
13811 }
Heejin Ahnb92440e2017-06-30 00:44:01 +000013812 case WebAssembly::BI__builtin_wasm_throw: {
13813 Value *Tag = EmitScalarExpr(E->getArg(0));
13814 Value *Obj = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013815 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_throw);
Heejin Ahnb92440e2017-06-30 00:44:01 +000013816 return Builder.CreateCall(Callee, {Tag, Obj});
13817 }
Heejin Ahn7e66a502019-03-16 05:39:12 +000013818 case WebAssembly::BI__builtin_wasm_rethrow_in_catch: {
13819 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_rethrow_in_catch);
Heejin Ahnb29a17b2017-09-16 01:07:43 +000013820 return Builder.CreateCall(Callee);
13821 }
Heejin Ahn00aa81b2018-08-02 21:44:40 +000013822 case WebAssembly::BI__builtin_wasm_atomic_wait_i32: {
13823 Value *Addr = EmitScalarExpr(E->getArg(0));
13824 Value *Expected = EmitScalarExpr(E->getArg(1));
13825 Value *Timeout = EmitScalarExpr(E->getArg(2));
James Y Knight8799cae2019-02-03 21:53:49 +000013826 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_atomic_wait_i32);
Heejin Ahn00aa81b2018-08-02 21:44:40 +000013827 return Builder.CreateCall(Callee, {Addr, Expected, Timeout});
13828 }
13829 case WebAssembly::BI__builtin_wasm_atomic_wait_i64: {
13830 Value *Addr = EmitScalarExpr(E->getArg(0));
13831 Value *Expected = EmitScalarExpr(E->getArg(1));
13832 Value *Timeout = EmitScalarExpr(E->getArg(2));
James Y Knight8799cae2019-02-03 21:53:49 +000013833 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_atomic_wait_i64);
Heejin Ahn00aa81b2018-08-02 21:44:40 +000013834 return Builder.CreateCall(Callee, {Addr, Expected, Timeout});
13835 }
13836 case WebAssembly::BI__builtin_wasm_atomic_notify: {
13837 Value *Addr = EmitScalarExpr(E->getArg(0));
13838 Value *Count = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013839 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_atomic_notify);
Heejin Ahn00aa81b2018-08-02 21:44:40 +000013840 return Builder.CreateCall(Callee, {Addr, Count});
13841 }
Thomas Lively07ce6df2018-10-11 00:07:55 +000013842 case WebAssembly::BI__builtin_wasm_trunc_saturate_s_i32_f32:
13843 case WebAssembly::BI__builtin_wasm_trunc_saturate_s_i32_f64:
13844 case WebAssembly::BI__builtin_wasm_trunc_saturate_s_i64_f32:
13845 case WebAssembly::BI__builtin_wasm_trunc_saturate_s_i64_f64:
Thomas Lively69403282018-11-01 01:03:17 +000013846 case WebAssembly::BI__builtin_wasm_trunc_saturate_s_i32x4_f32x4:
13847 case WebAssembly::BI__builtin_wasm_trunc_saturate_s_i64x2_f64x2: {
Thomas Lively07ce6df2018-10-11 00:07:55 +000013848 Value *Src = EmitScalarExpr(E->getArg(0));
13849 llvm::Type *ResT = ConvertType(E->getType());
James Y Knight8799cae2019-02-03 21:53:49 +000013850 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_trunc_saturate_signed,
Thomas Lively07ce6df2018-10-11 00:07:55 +000013851 {ResT, Src->getType()});
13852 return Builder.CreateCall(Callee, {Src});
13853 }
13854 case WebAssembly::BI__builtin_wasm_trunc_saturate_u_i32_f32:
13855 case WebAssembly::BI__builtin_wasm_trunc_saturate_u_i32_f64:
13856 case WebAssembly::BI__builtin_wasm_trunc_saturate_u_i64_f32:
13857 case WebAssembly::BI__builtin_wasm_trunc_saturate_u_i64_f64:
Thomas Lively69403282018-11-01 01:03:17 +000013858 case WebAssembly::BI__builtin_wasm_trunc_saturate_u_i32x4_f32x4:
13859 case WebAssembly::BI__builtin_wasm_trunc_saturate_u_i64x2_f64x2: {
Thomas Lively07ce6df2018-10-11 00:07:55 +000013860 Value *Src = EmitScalarExpr(E->getArg(0));
13861 llvm::Type *ResT = ConvertType(E->getType());
James Y Knight8799cae2019-02-03 21:53:49 +000013862 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_trunc_saturate_unsigned,
Thomas Lively07ce6df2018-10-11 00:07:55 +000013863 {ResT, Src->getType()});
13864 return Builder.CreateCall(Callee, {Src});
13865 }
Thomas Livelyd4bf99a2018-10-25 19:11:41 +000013866 case WebAssembly::BI__builtin_wasm_min_f32:
13867 case WebAssembly::BI__builtin_wasm_min_f64:
13868 case WebAssembly::BI__builtin_wasm_min_f32x4:
13869 case WebAssembly::BI__builtin_wasm_min_f64x2: {
13870 Value *LHS = EmitScalarExpr(E->getArg(0));
13871 Value *RHS = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013872 Function *Callee = CGM.getIntrinsic(Intrinsic::minimum,
Thomas Livelyd4bf99a2018-10-25 19:11:41 +000013873 ConvertType(E->getType()));
13874 return Builder.CreateCall(Callee, {LHS, RHS});
13875 }
13876 case WebAssembly::BI__builtin_wasm_max_f32:
13877 case WebAssembly::BI__builtin_wasm_max_f64:
13878 case WebAssembly::BI__builtin_wasm_max_f32x4:
13879 case WebAssembly::BI__builtin_wasm_max_f64x2: {
13880 Value *LHS = EmitScalarExpr(E->getArg(0));
13881 Value *RHS = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013882 Function *Callee = CGM.getIntrinsic(Intrinsic::maximum,
Thomas Livelyd4bf99a2018-10-25 19:11:41 +000013883 ConvertType(E->getType()));
13884 return Builder.CreateCall(Callee, {LHS, RHS});
13885 }
Thomas Livelyd6792c02018-10-05 00:54:44 +000013886 case WebAssembly::BI__builtin_wasm_extract_lane_s_i8x16:
13887 case WebAssembly::BI__builtin_wasm_extract_lane_u_i8x16:
13888 case WebAssembly::BI__builtin_wasm_extract_lane_s_i16x8:
13889 case WebAssembly::BI__builtin_wasm_extract_lane_u_i16x8:
13890 case WebAssembly::BI__builtin_wasm_extract_lane_i32x4:
13891 case WebAssembly::BI__builtin_wasm_extract_lane_i64x2:
13892 case WebAssembly::BI__builtin_wasm_extract_lane_f32x4:
13893 case WebAssembly::BI__builtin_wasm_extract_lane_f64x2: {
13894 llvm::APSInt LaneConst;
13895 if (!E->getArg(1)->isIntegerConstantExpr(LaneConst, getContext()))
13896 llvm_unreachable("Constant arg isn't actually constant?");
13897 Value *Vec = EmitScalarExpr(E->getArg(0));
13898 Value *Lane = llvm::ConstantInt::get(getLLVMContext(), LaneConst);
13899 Value *Extract = Builder.CreateExtractElement(Vec, Lane);
13900 switch (BuiltinID) {
13901 case WebAssembly::BI__builtin_wasm_extract_lane_s_i8x16:
13902 case WebAssembly::BI__builtin_wasm_extract_lane_s_i16x8:
13903 return Builder.CreateSExt(Extract, ConvertType(E->getType()));
13904 case WebAssembly::BI__builtin_wasm_extract_lane_u_i8x16:
13905 case WebAssembly::BI__builtin_wasm_extract_lane_u_i16x8:
13906 return Builder.CreateZExt(Extract, ConvertType(E->getType()));
13907 case WebAssembly::BI__builtin_wasm_extract_lane_i32x4:
13908 case WebAssembly::BI__builtin_wasm_extract_lane_i64x2:
13909 case WebAssembly::BI__builtin_wasm_extract_lane_f32x4:
13910 case WebAssembly::BI__builtin_wasm_extract_lane_f64x2:
13911 return Extract;
13912 default:
13913 llvm_unreachable("unexpected builtin ID");
13914 }
13915 }
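  // Note on the extract_lane lowering above: the lane index must be an integer
  // constant; sub-word lanes (i8x16/i16x8) are widened to the builtin's scalar
  // return type with sext for the _s variants and zext for the _u variants,
  // while i32x4/i64x2/f32x4/f64x2 lanes are returned unchanged.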
Thomas Livelya3474362018-10-05 00:58:07 +000013916 case WebAssembly::BI__builtin_wasm_replace_lane_i8x16:
13917 case WebAssembly::BI__builtin_wasm_replace_lane_i16x8:
13918 case WebAssembly::BI__builtin_wasm_replace_lane_i32x4:
13919 case WebAssembly::BI__builtin_wasm_replace_lane_i64x2:
13920 case WebAssembly::BI__builtin_wasm_replace_lane_f32x4:
13921 case WebAssembly::BI__builtin_wasm_replace_lane_f64x2: {
13922 llvm::APSInt LaneConst;
13923 if (!E->getArg(1)->isIntegerConstantExpr(LaneConst, getContext()))
13924 llvm_unreachable("Constant arg isn't actually constant?");
13925 Value *Vec = EmitScalarExpr(E->getArg(0));
13926 Value *Lane = llvm::ConstantInt::get(getLLVMContext(), LaneConst);
13927 Value *Val = EmitScalarExpr(E->getArg(2));
13928 switch (BuiltinID) {
13929 case WebAssembly::BI__builtin_wasm_replace_lane_i8x16:
13930 case WebAssembly::BI__builtin_wasm_replace_lane_i16x8: {
13931 llvm::Type *ElemType = ConvertType(E->getType())->getVectorElementType();
13932 Value *Trunc = Builder.CreateTrunc(Val, ElemType);
13933 return Builder.CreateInsertElement(Vec, Trunc, Lane);
13934 }
13935 case WebAssembly::BI__builtin_wasm_replace_lane_i32x4:
13936 case WebAssembly::BI__builtin_wasm_replace_lane_i64x2:
13937 case WebAssembly::BI__builtin_wasm_replace_lane_f32x4:
13938 case WebAssembly::BI__builtin_wasm_replace_lane_f64x2:
13939 return Builder.CreateInsertElement(Vec, Val, Lane);
13940 default:
13941 llvm_unreachable("unexpected builtin ID");
13942 }
13943 }
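  // Note on the replace_lane lowering above: the mirror of extract_lane; the
  // incoming scalar is truncated to the vector element type for i8x16/i16x8
  // before the insertelement and used directly for the wider lane types.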
Thomas Lively9034a472018-10-05 00:58:56 +000013944 case WebAssembly::BI__builtin_wasm_add_saturate_s_i8x16:
13945 case WebAssembly::BI__builtin_wasm_add_saturate_u_i8x16:
13946 case WebAssembly::BI__builtin_wasm_add_saturate_s_i16x8:
13947 case WebAssembly::BI__builtin_wasm_add_saturate_u_i16x8:
13948 case WebAssembly::BI__builtin_wasm_sub_saturate_s_i8x16:
13949 case WebAssembly::BI__builtin_wasm_sub_saturate_u_i8x16:
13950 case WebAssembly::BI__builtin_wasm_sub_saturate_s_i16x8:
13951 case WebAssembly::BI__builtin_wasm_sub_saturate_u_i16x8: {
13952 unsigned IntNo;
13953 switch (BuiltinID) {
13954 case WebAssembly::BI__builtin_wasm_add_saturate_s_i8x16:
13955 case WebAssembly::BI__builtin_wasm_add_saturate_s_i16x8:
Thomas Lively535b4df2018-10-25 19:06:15 +000013956 IntNo = Intrinsic::sadd_sat;
Thomas Lively9034a472018-10-05 00:58:56 +000013957 break;
13958 case WebAssembly::BI__builtin_wasm_add_saturate_u_i8x16:
13959 case WebAssembly::BI__builtin_wasm_add_saturate_u_i16x8:
Thomas Lively535b4df2018-10-25 19:06:15 +000013960 IntNo = Intrinsic::uadd_sat;
Thomas Lively9034a472018-10-05 00:58:56 +000013961 break;
13962 case WebAssembly::BI__builtin_wasm_sub_saturate_s_i8x16:
13963 case WebAssembly::BI__builtin_wasm_sub_saturate_s_i16x8:
13964 IntNo = Intrinsic::wasm_sub_saturate_signed;
13965 break;
13966 case WebAssembly::BI__builtin_wasm_sub_saturate_u_i8x16:
13967 case WebAssembly::BI__builtin_wasm_sub_saturate_u_i16x8:
13968 IntNo = Intrinsic::wasm_sub_saturate_unsigned;
13969 break;
13970 default:
13971 llvm_unreachable("unexpected builtin ID");
13972 }
13973 Value *LHS = EmitScalarExpr(E->getArg(0));
13974 Value *RHS = EmitScalarExpr(E->getArg(1));
James Y Knight8799cae2019-02-03 21:53:49 +000013975 Function *Callee = CGM.getIntrinsic(IntNo, ConvertType(E->getType()));
Thomas Lively9034a472018-10-05 00:58:56 +000013976 return Builder.CreateCall(Callee, {LHS, RHS});
13977 }
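  // Note on the saturating arithmetic above: saturating adds lower to the
  // target-independent Intrinsic::sadd_sat / Intrinsic::uadd_sat, while
  // saturating subtracts still use the WebAssembly-specific
  // Intrinsic::wasm_sub_saturate_signed / _unsigned intrinsics.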
Thomas Livelyd4bf99a2018-10-25 19:11:41 +000013978 case WebAssembly::BI__builtin_wasm_bitselect: {
13979 Value *V1 = EmitScalarExpr(E->getArg(0));
13980 Value *V2 = EmitScalarExpr(E->getArg(1));
13981 Value *C = EmitScalarExpr(E->getArg(2));
James Y Knight8799cae2019-02-03 21:53:49 +000013982 Function *Callee = CGM.getIntrinsic(Intrinsic::wasm_bitselect,
Thomas Livelyd4bf99a2018-10-25 19:11:41 +000013983 ConvertType(E->getType()));
13984 return Builder.CreateCall(Callee, {V1, V2, C});
13985 }
Thomas Lively291d75b2018-10-05 00:59:37 +000013986 case WebAssembly::BI__builtin_wasm_any_true_i8x16:
13987 case WebAssembly::BI__builtin_wasm_any_true_i16x8:
13988 case WebAssembly::BI__builtin_wasm_any_true_i32x4:
13989 case WebAssembly::BI__builtin_wasm_any_true_i64x2:
13990 case WebAssembly::BI__builtin_wasm_all_true_i8x16:
13991 case WebAssembly::BI__builtin_wasm_all_true_i16x8:
13992 case WebAssembly::BI__builtin_wasm_all_true_i32x4:
13993 case WebAssembly::BI__builtin_wasm_all_true_i64x2: {
13994 unsigned IntNo;
13995 switch (BuiltinID) {
13996 case WebAssembly::BI__builtin_wasm_any_true_i8x16:
13997 case WebAssembly::BI__builtin_wasm_any_true_i16x8:
13998 case WebAssembly::BI__builtin_wasm_any_true_i32x4:
13999 case WebAssembly::BI__builtin_wasm_any_true_i64x2:
14000 IntNo = Intrinsic::wasm_anytrue;
14001 break;
14002 case WebAssembly::BI__builtin_wasm_all_true_i8x16:
14003 case WebAssembly::BI__builtin_wasm_all_true_i16x8:
14004 case WebAssembly::BI__builtin_wasm_all_true_i32x4:
14005 case WebAssembly::BI__builtin_wasm_all_true_i64x2:
14006 IntNo = Intrinsic::wasm_alltrue;
14007 break;
14008 default:
14009 llvm_unreachable("unexpected builtin ID");
14010 }
14011 Value *Vec = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000014012 Function *Callee = CGM.getIntrinsic(IntNo, Vec->getType());
Thomas Lively291d75b2018-10-05 00:59:37 +000014013 return Builder.CreateCall(Callee, {Vec});
14014 }
Thomas Livelyd2a293c2018-10-05 01:02:54 +000014015 case WebAssembly::BI__builtin_wasm_abs_f32x4:
14016 case WebAssembly::BI__builtin_wasm_abs_f64x2: {
14017 Value *Vec = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000014018 Function *Callee = CGM.getIntrinsic(Intrinsic::fabs, Vec->getType());
Thomas Livelyd2a293c2018-10-05 01:02:54 +000014019 return Builder.CreateCall(Callee, {Vec});
14020 }
14021 case WebAssembly::BI__builtin_wasm_sqrt_f32x4:
14022 case WebAssembly::BI__builtin_wasm_sqrt_f64x2: {
14023 Value *Vec = EmitScalarExpr(E->getArg(0));
James Y Knight8799cae2019-02-03 21:53:49 +000014024 Function *Callee = CGM.getIntrinsic(Intrinsic::sqrt, Vec->getType());
Thomas Livelyd2a293c2018-10-05 01:02:54 +000014025 return Builder.CreateCall(Callee, {Vec});
14026 }
Dan Gohmanc2853072015-09-03 22:51:53 +000014027
14028 default:
14029 return nullptr;
14030 }
14031}
Krzysztof Parzyszek5a655832017-12-13 19:56:03 +000014032
14033Value *CodeGenFunction::EmitHexagonBuiltinExpr(unsigned BuiltinID,
14034 const CallExpr *E) {
14035 SmallVector<llvm::Value *, 4> Ops;
14036 Intrinsic::ID ID = Intrinsic::not_intrinsic;
14037
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014038 auto MakeCircLd = [&](unsigned IntID, bool HasImm) {
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014039 // The base pointer is passed by address, so it needs to be loaded.
14040 Address BP = EmitPointerWithAlignment(E->getArg(0));
14041 BP = Address(Builder.CreateBitCast(BP.getPointer(), Int8PtrPtrTy),
14042 BP.getAlignment());
14043 llvm::Value *Base = Builder.CreateLoad(BP);
14044 // Operands are Base, Increment, Modifier, Start.
14045 if (HasImm)
14046 Ops = { Base, EmitScalarExpr(E->getArg(1)), EmitScalarExpr(E->getArg(2)),
14047 EmitScalarExpr(E->getArg(3)) };
14048 else
14049 Ops = { Base, EmitScalarExpr(E->getArg(1)),
14050 EmitScalarExpr(E->getArg(2)) };
14051
14052 llvm::Value *Result = Builder.CreateCall(CGM.getIntrinsic(IntID), Ops);
14053 llvm::Value *NewBase = Builder.CreateExtractValue(Result, 1);
14054 llvm::Value *LV = Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)),
14055 NewBase->getType()->getPointerTo());
14056 Address Dest = EmitPointerWithAlignment(E->getArg(0));
14057 // The intrinsic generates two results. The new value for the base pointer
14058 // needs to be stored.
14059 Builder.CreateAlignedStore(NewBase, LV, Dest.getAlignment());
14060 return Builder.CreateExtractValue(Result, 0);
14061 };
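  // Summary of MakeCircLd: the circular-load intrinsic returns a pair; element
  // 0 is the loaded value, which becomes the builtin's result, and element 1
  // is the post-incremented base pointer, which is stored back through the
  // first builtin argument (passed by address).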
14062
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014063 auto MakeCircSt = [&](unsigned IntID, bool HasImm) {
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014064 // The base pointer is passed by address, so it needs to be loaded.
14065 Address BP = EmitPointerWithAlignment(E->getArg(0));
14066 BP = Address(Builder.CreateBitCast(BP.getPointer(), Int8PtrPtrTy),
14067 BP.getAlignment());
14068 llvm::Value *Base = Builder.CreateLoad(BP);
14069 // Operands are Base, Increment, Modifier, Value, Start.
14070 if (HasImm)
14071 Ops = { Base, EmitScalarExpr(E->getArg(1)), EmitScalarExpr(E->getArg(2)),
14072 EmitScalarExpr(E->getArg(3)), EmitScalarExpr(E->getArg(4)) };
14073 else
14074 Ops = { Base, EmitScalarExpr(E->getArg(1)),
14075 EmitScalarExpr(E->getArg(2)), EmitScalarExpr(E->getArg(3)) };
14076
14077 llvm::Value *NewBase = Builder.CreateCall(CGM.getIntrinsic(IntID), Ops);
14078 llvm::Value *LV = Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)),
14079 NewBase->getType()->getPointerTo());
14080 Address Dest = EmitPointerWithAlignment(E->getArg(0));
14081 // The intrinsic generates one result, which is the new value for the base
14082 // pointer. It needs to be stored.
14083 return Builder.CreateAlignedStore(NewBase, LV, Dest.getAlignment());
14084 };
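  // Summary of MakeCircSt: the circular-store intrinsic returns only the
  // post-incremented base pointer, which is written back through the first
  // builtin argument; the stored value itself produces no further result.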
14085
Krzysztof Parzyszek790e4222018-03-29 13:54:31 +000014086 // Handle the conversion of bit-reverse load intrinsics to bit code.
14087 // The intrinsic call emitted below only reads from memory; the write back
14088 // to memory is handled by the explicit store instruction.
14089 auto MakeBrevLd = [&](unsigned IntID, llvm::Type *DestTy) {
14090 // The builtin returns a single value, the new value for the base pointer,
14091 // so that is what this lambda returns. The loaded value is passed back to
14092 // the caller by address (the second builtin argument), so it must be stored.
14093 llvm::Value *BaseAddress =
14094 Builder.CreateBitCast(EmitScalarExpr(E->getArg(0)), Int8PtrTy);
14095
14096 // Expressions like &(*pt++) increment the pointer on every evaluation, and
14097 // EmitPointerWithAlignment and EmitScalarExpr each evaluate the expression
14098 // once per call, so the destination argument is only emitted once below.
14099 Address DestAddr = EmitPointerWithAlignment(E->getArg(1));
14100 DestAddr = Address(Builder.CreateBitCast(DestAddr.getPointer(), Int8PtrTy),
14101 DestAddr.getAlignment());
14102 llvm::Value *DestAddress = DestAddr.getPointer();
14103
14104 // The builtin's operands are Base, Dest, Modifier; only Base and Modifier
14105 // are passed to the intrinsic, whose LLVM IR signature is defined as
14106 // { ValueType, i8* } (i8*, i32).
14107 Ops = {BaseAddress, EmitScalarExpr(E->getArg(2))};
14108
14109 llvm::Value *Result = Builder.CreateCall(CGM.getIntrinsic(IntID), Ops);
14110 // The value needs to be stored as the variable is passed by reference.
14111 llvm::Value *DestVal = Builder.CreateExtractValue(Result, 0);
14112
14113 // The loaded value needs to be truncated to fit the destination type.
14114 // While i32 and i64 are natively supported on Hexagon, i8 and i16 need
14115 // to be handled with stores of the respective destination type.
14116 DestVal = Builder.CreateTrunc(DestVal, DestTy);
14117
14118 llvm::Value *DestForStore =
14119 Builder.CreateBitCast(DestAddress, DestVal->getType()->getPointerTo());
14120 Builder.CreateAlignedStore(DestVal, DestForStore, DestAddr.getAlignment());
14121 // The updated value of the base pointer is returned.
14122 return Builder.CreateExtractValue(Result, 1);
14123 };
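  // Rough usage sketch for the bit-reverse loads handled by MakeBrevLd
  // (names and types are illustrative, not taken from the builtin headers):
  //   unsigned short v;
  //   void *next_base = __builtin_brev_lduh(base, &v, mod);
  // The loaded value comes back through the second argument and the updated
  // base pointer is the return value.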
14124
Krzysztof Parzyszek5a655832017-12-13 19:56:03 +000014125 switch (BuiltinID) {
14126 case Hexagon::BI__builtin_HEXAGON_V6_vaddcarry:
14127 case Hexagon::BI__builtin_HEXAGON_V6_vaddcarry_128B: {
14128 Address Dest = EmitPointerWithAlignment(E->getArg(2));
14129 unsigned Size;
14130 if (BuiltinID == Hexagon::BI__builtin_HEXAGON_V6_vaddcarry) {
14131 Size = 512;
14132 ID = Intrinsic::hexagon_V6_vaddcarry;
14133 } else {
14134 Size = 1024;
14135 ID = Intrinsic::hexagon_V6_vaddcarry_128B;
14136 }
14137 Dest = Builder.CreateBitCast(Dest,
14138 llvm::VectorType::get(Builder.getInt1Ty(), Size)->getPointerTo(0));
14139 LoadInst *QLd = Builder.CreateLoad(Dest);
14140 Ops = { EmitScalarExpr(E->getArg(0)), EmitScalarExpr(E->getArg(1)), QLd };
14141 llvm::Value *Result = Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
14142 llvm::Value *Vprd = Builder.CreateExtractValue(Result, 1);
14143 llvm::Value *Base = Builder.CreateBitCast(EmitScalarExpr(E->getArg(2)),
14144 Vprd->getType()->getPointerTo(0));
14145 Builder.CreateAlignedStore(Vprd, Base, Dest.getAlignment());
14146 return Builder.CreateExtractValue(Result, 0);
14147 }
14148 case Hexagon::BI__builtin_HEXAGON_V6_vsubcarry:
14149 case Hexagon::BI__builtin_HEXAGON_V6_vsubcarry_128B: {
14150 Address Dest = EmitPointerWithAlignment(E->getArg(2));
14151 unsigned Size;
14152 if (BuiltinID == Hexagon::BI__builtin_HEXAGON_V6_vsubcarry) {
14153 Size = 512;
14154 ID = Intrinsic::hexagon_V6_vsubcarry;
14155 } else {
14156 Size = 1024;
14157 ID = Intrinsic::hexagon_V6_vsubcarry_128B;
14158 }
14159 Dest = Builder.CreateBitCast(Dest,
14160 llvm::VectorType::get(Builder.getInt1Ty(), Size)->getPointerTo(0));
14161 LoadInst *QLd = Builder.CreateLoad(Dest);
14162 Ops = { EmitScalarExpr(E->getArg(0)), EmitScalarExpr(E->getArg(1)), QLd };
14163 llvm::Value *Result = Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
14164 llvm::Value *Vprd = Builder.CreateExtractValue(Result, 1);
14165 llvm::Value *Base = Builder.CreateBitCast(EmitScalarExpr(E->getArg(2)),
14166 Vprd->getType()->getPointerTo(0));
14167 Builder.CreateAlignedStore(Vprd, Base, Dest.getAlignment());
14168 return Builder.CreateExtractValue(Result, 0);
14169 }
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014170 case Hexagon::BI__builtin_HEXAGON_L2_loadrub_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014171 return MakeCircLd(Intrinsic::hexagon_L2_loadrub_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014172 case Hexagon::BI__builtin_HEXAGON_L2_loadrb_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014173 return MakeCircLd(Intrinsic::hexagon_L2_loadrb_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014174 case Hexagon::BI__builtin_HEXAGON_L2_loadruh_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014175 return MakeCircLd(Intrinsic::hexagon_L2_loadruh_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014176 case Hexagon::BI__builtin_HEXAGON_L2_loadrh_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014177 return MakeCircLd(Intrinsic::hexagon_L2_loadrh_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014178 case Hexagon::BI__builtin_HEXAGON_L2_loadri_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014179 return MakeCircLd(Intrinsic::hexagon_L2_loadri_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014180 case Hexagon::BI__builtin_HEXAGON_L2_loadrd_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014181 return MakeCircLd(Intrinsic::hexagon_L2_loadrd_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014182 case Hexagon::BI__builtin_HEXAGON_L2_loadrub_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014183 return MakeCircLd(Intrinsic::hexagon_L2_loadrub_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014184 case Hexagon::BI__builtin_HEXAGON_L2_loadrb_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014185 return MakeCircLd(Intrinsic::hexagon_L2_loadrb_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014186 case Hexagon::BI__builtin_HEXAGON_L2_loadruh_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014187 return MakeCircLd(Intrinsic::hexagon_L2_loadruh_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014188 case Hexagon::BI__builtin_HEXAGON_L2_loadrh_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014189 return MakeCircLd(Intrinsic::hexagon_L2_loadrh_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014190 case Hexagon::BI__builtin_HEXAGON_L2_loadri_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014191 return MakeCircLd(Intrinsic::hexagon_L2_loadri_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014192 case Hexagon::BI__builtin_HEXAGON_L2_loadrd_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014193 return MakeCircLd(Intrinsic::hexagon_L2_loadrd_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014194 case Hexagon::BI__builtin_HEXAGON_S2_storerb_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014195 return MakeCircSt(Intrinsic::hexagon_S2_storerb_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014196 case Hexagon::BI__builtin_HEXAGON_S2_storerh_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014197 return MakeCircSt(Intrinsic::hexagon_S2_storerh_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014198 case Hexagon::BI__builtin_HEXAGON_S2_storerf_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014199 return MakeCircSt(Intrinsic::hexagon_S2_storerf_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014200 case Hexagon::BI__builtin_HEXAGON_S2_storeri_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014201 return MakeCircSt(Intrinsic::hexagon_S2_storeri_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014202 case Hexagon::BI__builtin_HEXAGON_S2_storerd_pci:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014203 return MakeCircSt(Intrinsic::hexagon_S2_storerd_pci, /*HasImm*/true);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014204 case Hexagon::BI__builtin_HEXAGON_S2_storerb_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014205 return MakeCircSt(Intrinsic::hexagon_S2_storerb_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014206 case Hexagon::BI__builtin_HEXAGON_S2_storerh_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014207 return MakeCircSt(Intrinsic::hexagon_S2_storerh_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014208 case Hexagon::BI__builtin_HEXAGON_S2_storerf_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014209 return MakeCircSt(Intrinsic::hexagon_S2_storerf_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014210 case Hexagon::BI__builtin_HEXAGON_S2_storeri_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014211 return MakeCircSt(Intrinsic::hexagon_S2_storeri_pcr, /*HasImm*/false);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014212 case Hexagon::BI__builtin_HEXAGON_S2_storerd_pcr:
Krzysztof Parzyszek49fb6b52018-04-06 13:51:48 +000014213 return MakeCircSt(Intrinsic::hexagon_S2_storerd_pcr, /*HasImm*/false);
Krzysztof Parzyszek790e4222018-03-29 13:54:31 +000014214 case Hexagon::BI__builtin_brev_ldub:
14215 return MakeBrevLd(Intrinsic::hexagon_L2_loadrub_pbr, Int8Ty);
14216 case Hexagon::BI__builtin_brev_ldb:
14217 return MakeBrevLd(Intrinsic::hexagon_L2_loadrb_pbr, Int8Ty);
14218 case Hexagon::BI__builtin_brev_lduh:
14219 return MakeBrevLd(Intrinsic::hexagon_L2_loadruh_pbr, Int16Ty);
14220 case Hexagon::BI__builtin_brev_ldh:
14221 return MakeBrevLd(Intrinsic::hexagon_L2_loadrh_pbr, Int16Ty);
14222 case Hexagon::BI__builtin_brev_ldw:
14223 return MakeBrevLd(Intrinsic::hexagon_L2_loadri_pbr, Int32Ty);
14224 case Hexagon::BI__builtin_brev_ldd:
14225 return MakeBrevLd(Intrinsic::hexagon_L2_loadrd_pbr, Int64Ty);
Krzysztof Parzyszek1ef2a1f2018-03-28 19:40:57 +000014226 default:
14227 break;
Krzysztof Parzyszek5a655832017-12-13 19:56:03 +000014228 } // switch
14229
14230 return nullptr;
14231}