blob: 245fed01e523f595aec26d294bd28db9e0ad50a6 [file] [log] [blame]
Alexey Bataev9959db52014-05-06 10:08:46 +00001//===--- CGStmtOpenMP.cpp - Emit LLVM Code from Statements ----------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This contains code to emit OpenMP nodes as LLVM code.
11//
12//===----------------------------------------------------------------------===//
13
#include "CGOpenMPRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtOpenMP.h"
#include "llvm/ADT/STLExtras.h"
20using namespace clang;
21using namespace CodeGen;
22
23//===----------------------------------------------------------------------===//
24// OpenMP Directive Emission
25//===----------------------------------------------------------------------===//
namespace {
/// \brief RAII for inlined OpenMP regions (like 'omp for', 'omp simd', 'omp
/// critical' etc.). Helps to generate proper debug info and provides correct
/// code generation for such constructs.
class InlinedOpenMPRegionScopeRAII {
  // NOTE: declaration order is significant. C++ constructs members in
  // declaration order and destroys them in reverse, so the inlined region
  // is entered before the lexical (debug-info) scope opens and is left
  // only after that scope closes.
  InlinedOpenMPRegionRAII Region;
  CodeGenFunction::LexicalScope DirectiveScope;

public:
  /// \brief Enters an inlined region for directive \a D and opens a lexical
  /// scope covering the directive's full source range.
  InlinedOpenMPRegionScopeRAII(CodeGenFunction &CGF,
                               const OMPExecutableDirective &D)
      : Region(CGF, D), DirectiveScope(CGF, D.getSourceRange()) {}
};
} // namespace
Alexey Bataev9959db52014-05-06 10:08:46 +000040
Alexey Bataevd74d0602014-10-13 06:02:40 +000041/// \brief Emits code for OpenMP 'if' clause using specified \a CodeGen
42/// function. Here is the logic:
43/// if (Cond) {
44/// CodeGen(true);
45/// } else {
46/// CodeGen(false);
47/// }
48static void EmitOMPIfClause(CodeGenFunction &CGF, const Expr *Cond,
49 const std::function<void(bool)> &CodeGen) {
50 CodeGenFunction::LexicalScope ConditionScope(CGF, Cond->getSourceRange());
51
52 // If the condition constant folds and can be elided, try to avoid emitting
53 // the condition and the dead arm of the if/else.
54 bool CondConstant;
55 if (CGF.ConstantFoldsToSimpleInteger(Cond, CondConstant)) {
56 CodeGen(CondConstant);
57 return;
58 }
59
60 // Otherwise, the condition did not fold, or we couldn't elide it. Just
61 // emit the conditional branch.
62 auto ThenBlock = CGF.createBasicBlock(/*name*/ "omp_if.then");
63 auto ElseBlock = CGF.createBasicBlock(/*name*/ "omp_if.else");
64 auto ContBlock = CGF.createBasicBlock(/*name*/ "omp_if.end");
65 CGF.EmitBranchOnBoolExpr(Cond, ThenBlock, ElseBlock, /*TrueCount*/ 0);
66
67 // Emit the 'then' code.
68 CGF.EmitBlock(ThenBlock);
69 CodeGen(/*ThenBlock*/ true);
70 CGF.EmitBranch(ContBlock);
71 // Emit the 'else' code if present.
72 {
73 // There is no need to emit line number for unconditional branch.
Adrian Prantl95b24e92015-02-03 20:00:54 +000074 auto NL = ApplyDebugLocation::CreateEmpty(CGF);
Alexey Bataevd74d0602014-10-13 06:02:40 +000075 CGF.EmitBlock(ElseBlock);
76 }
77 CodeGen(/*ThenBlock*/ false);
78 {
79 // There is no need to emit line number for unconditional branch.
Adrian Prantl95b24e92015-02-03 20:00:54 +000080 auto NL = ApplyDebugLocation::CreateEmpty(CGF);
Alexey Bataevd74d0602014-10-13 06:02:40 +000081 CGF.EmitBranch(ContBlock);
82 }
83 // Emit the continuation block for code after the if.
84 CGF.EmitBlock(ContBlock, /*IsFinished*/ true);
85}
86
/// \brief Emits initialization of the private copy at \a PrivateAddr from the
/// original aggregate at \a OriginalAddr.
///
/// For non-constructor or trivial initializers a plain aggregate copy
/// (memcpy) is emitted. Otherwise the array is initialized element by
/// element, walking backwards from the end, with \a VDInit temporarily bound
/// to the current source element while \a AssignExpr is evaluated.
void CodeGenFunction::EmitOMPAggregateAssign(LValue OriginalAddr,
                                             llvm::Value *PrivateAddr,
                                             const Expr *AssignExpr,
                                             QualType OriginalType,
                                             const VarDecl *VDInit) {
  EmitBlock(createBasicBlock(".omp.assign.begin."));
  if (!isa<CXXConstructExpr>(AssignExpr) || isTrivialInitializer(AssignExpr)) {
    // Perform simple memcpy.
    EmitAggregateAssign(PrivateAddr, OriginalAddr.getAddress(),
                        AssignExpr->getType());
  } else {
    // Perform element-by-element initialization.
    QualType ElementTy;
    auto SrcBegin = OriginalAddr.getAddress();
    auto DestBegin = PrivateAddr;
    auto ArrayTy = OriginalType->getAsArrayTypeUnsafe();
    auto SrcNumElements = emitArrayLength(ArrayTy, ElementTy, SrcBegin);
    auto DestNumElements = emitArrayLength(ArrayTy, ElementTy, DestBegin);
    auto SrcEnd = Builder.CreateGEP(SrcBegin, SrcNumElements);
    auto DestEnd = Builder.CreateGEP(DestBegin, DestNumElements);
    // The basic structure here is a do-while loop, because we don't
    // need to check for the zero-element case.
    auto BodyBB = createBasicBlock("omp.arraycpy.body");
    auto DoneBB = createBasicBlock("omp.arraycpy.done");
    auto IsEmpty =
        Builder.CreateICmpEQ(DestBegin, DestEnd, "omp.arraycpy.isempty");
    Builder.CreateCondBr(IsEmpty, DoneBB, BodyBB);

    // Enter the loop body, making that address the current address.
    // The PHIs carry the one-past-the-current-element pointers; the second
    // incoming value is added after the body is emitted (see below).
    auto EntryBB = Builder.GetInsertBlock();
    EmitBlock(BodyBB);
    auto SrcElementPast = Builder.CreatePHI(SrcBegin->getType(), 2,
                                            "omp.arraycpy.srcElementPast");
    SrcElementPast->addIncoming(SrcEnd, EntryBB);
    auto DestElementPast = Builder.CreatePHI(DestBegin->getType(), 2,
                                             "omp.arraycpy.destElementPast");
    DestElementPast->addIncoming(DestEnd, EntryBB);

    // Shift the address back by one element.
    auto NegativeOne = llvm::ConstantInt::get(SizeTy, -1, true);
    auto DestElement = Builder.CreateGEP(DestElementPast, NegativeOne,
                                         "omp.arraycpy.dest.element");
    auto SrcElement = Builder.CreateGEP(SrcElementPast, NegativeOne,
                                        "omp.arraycpy.src.element");
    {
      // Create RunCleanScope to cleanup possible temps.
      CodeGenFunction::RunCleanupsScope Init(*this);
      // Emit initialization for single element. VDInit is temporarily mapped
      // to the current source element so AssignExpr can refer to it.
      LocalDeclMap[VDInit] = SrcElement;
      EmitAnyExprToMem(AssignExpr, DestElement,
                       AssignExpr->getType().getQualifiers(),
                       /*IsInitializer*/ false);
      LocalDeclMap.erase(VDInit);
    }

    // Check whether we've reached the end.
    auto Done =
        Builder.CreateICmpEQ(DestElement, DestBegin, "omp.arraycpy.done");
    Builder.CreateCondBr(Done, DoneBB, BodyBB);
    // Close the PHI cycle with the values produced by this iteration.
    DestElementPast->addIncoming(DestElement, Builder.GetInsertBlock());
    SrcElementPast->addIncoming(SrcElement, Builder.GetInsertBlock());

    // Done.
    EmitBlock(DoneBB, true);
  }
  EmitBlock(createBasicBlock(".omp.assign.end."));
}
154
/// \brief Emits private copies with copy-initialization for all variables
/// listed in the 'firstprivate' clauses of \a D, registering each copy in
/// \a PrivateScope.
void CodeGenFunction::EmitOMPFirstprivateClause(
    const OMPExecutableDirective &D,
    CodeGenFunction::OMPPrivateScope &PrivateScope) {
  auto PrivateFilter = [](const OMPClause *C) -> bool {
    return C->getClauseKind() == OMPC_firstprivate;
  };
  for (OMPExecutableDirective::filtered_clause_iterator<decltype(PrivateFilter)>
           I(D.clauses(), PrivateFilter); I; ++I) {
    auto *C = cast<OMPFirstprivateClause>(*I);
    // Walk the original variable references, their init helpers and the
    // private copies in lock step.
    auto IRef = C->varlist_begin();
    auto InitsRef = C->inits().begin();
    for (auto IInit : C->private_copies()) {
      auto *OrigVD = cast<VarDecl>(cast<DeclRefExpr>(*IRef)->getDecl());
      auto *VD = cast<VarDecl>(cast<DeclRefExpr>(IInit)->getDecl());
      bool IsRegistered;
      if (*InitsRef != nullptr) {
        // Emit VarDecl with copy init for arrays.
        // The original array lives in the captured-statement record; load its
        // address from the corresponding field.
        auto *FD = CapturedStmtInfo->lookup(OrigVD);
        LValue Base = MakeNaturalAlignAddrLValue(
            CapturedStmtInfo->getContextValue(),
            getContext().getTagDeclType(FD->getParent()));
        auto OriginalAddr = EmitLValueForField(Base, FD);
        auto VDInit = cast<VarDecl>(cast<DeclRefExpr>(*InitsRef)->getDecl());
        IsRegistered = PrivateScope.addPrivate(OrigVD, [&]() -> llvm::Value * {
          auto Emission = EmitAutoVarAlloca(*VD);
          // Emit initialization of aggregate firstprivate vars.
          EmitOMPAggregateAssign(OriginalAddr, Emission.getAllocatedAddress(),
                                 VD->getInit(), (*IRef)->getType(), VDInit);
          EmitAutoVarCleanups(Emission);
          return Emission.getAllocatedAddress();
        });
      } else
        IsRegistered = PrivateScope.addPrivate(OrigVD, [&]() -> llvm::Value * {
          // Emit private VarDecl with copy init.
          EmitDecl(*VD);
          return GetAddrOfLocalVar(VD);
        });
      assert(IsRegistered && "counter already registered as private");
      // Silence the warning about unused variable.
      (void)IsRegistered;
      ++IRef, ++InitsRef;
    }
  }
}
199
Alexey Bataev03b340a2014-10-21 03:16:40 +0000200void CodeGenFunction::EmitOMPPrivateClause(
201 const OMPExecutableDirective &D,
202 CodeGenFunction::OMPPrivateScope &PrivateScope) {
203 auto PrivateFilter = [](const OMPClause *C) -> bool {
204 return C->getClauseKind() == OMPC_private;
205 };
206 for (OMPExecutableDirective::filtered_clause_iterator<decltype(PrivateFilter)>
207 I(D.clauses(), PrivateFilter); I; ++I) {
208 auto *C = cast<OMPPrivateClause>(*I);
209 auto IRef = C->varlist_begin();
210 for (auto IInit : C->private_copies()) {
211 auto *OrigVD = cast<VarDecl>(cast<DeclRefExpr>(*IRef)->getDecl());
212 auto VD = cast<VarDecl>(cast<DeclRefExpr>(IInit)->getDecl());
213 bool IsRegistered =
214 PrivateScope.addPrivate(OrigVD, [&]() -> llvm::Value * {
215 // Emit private VarDecl with copy init.
216 EmitDecl(*VD);
217 return GetAddrOfLocalVar(VD);
218 });
219 assert(IsRegistered && "counter already registered as private");
220 // Silence the warning about unused variable.
221 (void)IsRegistered;
222 ++IRef;
223 }
224 }
225}
226
Alexey Bataevb2059782014-10-13 08:23:51 +0000227/// \brief Emits code for OpenMP parallel directive in the parallel region.
228static void EmitOMPParallelCall(CodeGenFunction &CGF,
229 const OMPParallelDirective &S,
230 llvm::Value *OutlinedFn,
231 llvm::Value *CapturedStruct) {
232 if (auto C = S.getSingleClause(/*K*/ OMPC_num_threads)) {
233 CodeGenFunction::RunCleanupsScope NumThreadsScope(CGF);
234 auto NumThreadsClause = cast<OMPNumThreadsClause>(C);
235 auto NumThreads = CGF.EmitScalarExpr(NumThreadsClause->getNumThreads(),
236 /*IgnoreResultAssign*/ true);
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000237 CGF.CGM.getOpenMPRuntime().emitNumThreadsClause(
Alexey Bataevb2059782014-10-13 08:23:51 +0000238 CGF, NumThreads, NumThreadsClause->getLocStart());
239 }
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000240 CGF.CGM.getOpenMPRuntime().emitParallelCall(CGF, S.getLocStart(), OutlinedFn,
241 CapturedStruct);
Alexey Bataevb2059782014-10-13 08:23:51 +0000242}
243
Alexey Bataev9959db52014-05-06 10:08:46 +0000244void CodeGenFunction::EmitOMPParallelDirective(const OMPParallelDirective &S) {
Alexey Bataev18095712014-10-10 12:19:54 +0000245 auto CS = cast<CapturedStmt>(S.getAssociatedStmt());
246 auto CapturedStruct = GenerateCapturedStmtArgument(*CS);
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000247 auto OutlinedFn = CGM.getOpenMPRuntime().emitOutlinedFunction(
Alexey Bataev18095712014-10-10 12:19:54 +0000248 S, *CS->getCapturedDecl()->param_begin());
Alexey Bataevd74d0602014-10-13 06:02:40 +0000249 if (auto C = S.getSingleClause(/*K*/ OMPC_if)) {
250 auto Cond = cast<OMPIfClause>(C)->getCondition();
251 EmitOMPIfClause(*this, Cond, [&](bool ThenBlock) {
252 if (ThenBlock)
Alexey Bataevb2059782014-10-13 08:23:51 +0000253 EmitOMPParallelCall(*this, S, OutlinedFn, CapturedStruct);
Alexey Bataevd74d0602014-10-13 06:02:40 +0000254 else
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000255 CGM.getOpenMPRuntime().emitSerialCall(*this, S.getLocStart(),
256 OutlinedFn, CapturedStruct);
Alexey Bataevd74d0602014-10-13 06:02:40 +0000257 });
Alexey Bataevb2059782014-10-13 08:23:51 +0000258 } else
259 EmitOMPParallelCall(*this, S, OutlinedFn, CapturedStruct);
Alexey Bataev9959db52014-05-06 10:08:46 +0000260}
Alexander Musman515ad8c2014-05-22 08:54:05 +0000261
Alexander Musmand196ef22014-10-07 08:57:09 +0000262void CodeGenFunction::EmitOMPLoopBody(const OMPLoopDirective &S,
Alexander Musmana5f070a2014-10-01 06:03:56 +0000263 bool SeparateIter) {
264 RunCleanupsScope BodyScope(*this);
265 // Update counters values on current iteration.
266 for (auto I : S.updates()) {
267 EmitIgnoredExpr(I);
268 }
269 // On a continue in the body, jump to the end.
Alexander Musmand196ef22014-10-07 08:57:09 +0000270 auto Continue = getJumpDestInCurrentScope("omp.body.continue");
Alexander Musmana5f070a2014-10-01 06:03:56 +0000271 BreakContinueStack.push_back(BreakContinue(JumpDest(), Continue));
272 // Emit loop body.
273 EmitStmt(S.getBody());
274 // The end (updates/cleanups).
275 EmitBlock(Continue.getBlock());
276 BreakContinueStack.pop_back();
277 if (SeparateIter) {
278 // TODO: Update lastprivates if the SeparateIter flag is true.
279 // This will be implemented in a follow-up OMPLastprivateClause patch, but
280 // result should be still correct without it, as we do not make these
281 // variables private yet.
282 }
283}
284
/// \brief Emits the inner "while (IV < UB) { body; IV++; }" loop of an OpenMP
/// loop directive, including PGO counters and cleanup staging.
void CodeGenFunction::EmitOMPInnerLoop(const OMPLoopDirective &S,
                                       OMPPrivateScope &LoopScope,
                                       bool SeparateIter) {
  auto LoopExit = getJumpDestInCurrentScope("omp.inner.for.end");
  auto Cnt = getPGORegionCounter(&S);

  // Start the loop with a block that tests the condition.
  auto CondBlock = createBasicBlock("omp.inner.for.cond");
  EmitBlock(CondBlock);
  LoopStack.push(CondBlock);

  // If there are any cleanups between here and the loop-exit scope,
  // create a block to stage a loop exit along.
  auto ExitBlock = LoopExit.getBlock();
  if (LoopScope.requiresCleanups())
    ExitBlock = createBasicBlock("omp.inner.for.cond.cleanup");

  auto LoopBody = createBasicBlock("omp.inner.for.body");

  // Emit condition: "IV < LastIteration + 1 [ - 1]"
  // ("- 1" when lastprivate clause is present - separate one iteration).
  llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond(SeparateIter));
  Builder.CreateCondBr(BoolCondVal, LoopBody, ExitBlock,
                       PGO.createLoopWeights(S.getCond(SeparateIter), Cnt));

  // Route the exit through the cleanup block when one was created above.
  if (ExitBlock != LoopExit.getBlock()) {
    EmitBlock(ExitBlock);
    EmitBranchThroughCleanup(LoopExit);
  }

  EmitBlock(LoopBody);
  Cnt.beginRegion(Builder);

  // Create a block for the increment.
  auto Continue = getJumpDestInCurrentScope("omp.inner.for.inc");
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  EmitOMPLoopBody(S);
  EmitStopPoint(&S);

  // Emit "IV = IV + 1" and a back-edge to the condition block.
  EmitBlock(Continue.getBlock());
  EmitIgnoredExpr(S.getInc());
  BreakContinueStack.pop_back();
  EmitBranch(CondBlock);
  LoopStack.pop();
  // Emit the fall-through block.
  EmitBlock(LoopExit.getBlock());
}
334
335void CodeGenFunction::EmitOMPSimdFinal(const OMPLoopDirective &S) {
336 auto IC = S.counters().begin();
337 for (auto F : S.finals()) {
338 if (LocalDeclMap.lookup(cast<DeclRefExpr>((*IC))->getDecl())) {
339 EmitIgnoredExpr(F);
340 }
341 ++IC;
342 }
343}
344
Alexander Musman09184fe2014-09-30 05:29:28 +0000345static void EmitOMPAlignedClause(CodeGenFunction &CGF, CodeGenModule &CGM,
346 const OMPAlignedClause &Clause) {
347 unsigned ClauseAlignment = 0;
348 if (auto AlignmentExpr = Clause.getAlignment()) {
349 auto AlignmentCI =
350 cast<llvm::ConstantInt>(CGF.EmitScalarExpr(AlignmentExpr));
351 ClauseAlignment = static_cast<unsigned>(AlignmentCI->getZExtValue());
352 }
353 for (auto E : Clause.varlists()) {
354 unsigned Alignment = ClauseAlignment;
355 if (Alignment == 0) {
356 // OpenMP [2.8.1, Description]
Alexey Bataev435ad7b2014-10-10 09:48:26 +0000357 // If no optional parameter is specified, implementation-defined default
Alexander Musman09184fe2014-09-30 05:29:28 +0000358 // alignments for SIMD instructions on the target platforms are assumed.
359 Alignment = CGM.getTargetCodeGenInfo().getOpenMPSimdDefaultAlignment(
360 E->getType());
361 }
362 assert((Alignment == 0 || llvm::isPowerOf2_32(Alignment)) &&
363 "alignment is not power of 2");
364 if (Alignment != 0) {
365 llvm::Value *PtrValue = CGF.EmitScalarExpr(E);
366 CGF.EmitAlignmentAssumption(PtrValue, Alignment);
367 }
368 }
369}
370
Alexey Bataev435ad7b2014-10-10 09:48:26 +0000371static void EmitPrivateLoopCounters(CodeGenFunction &CGF,
372 CodeGenFunction::OMPPrivateScope &LoopScope,
373 ArrayRef<Expr *> Counters) {
374 for (auto *E : Counters) {
375 auto VD = cast<VarDecl>(cast<DeclRefExpr>(E)->getDecl());
376 bool IsRegistered = LoopScope.addPrivate(VD, [&]() -> llvm::Value * {
377 // Emit var without initialization.
378 auto VarEmission = CGF.EmitAutoVarAlloca(*VD);
379 CGF.EmitAutoVarCleanups(VarEmission);
380 return VarEmission.getAllocatedAddress();
381 });
382 assert(IsRegistered && "counter already registered as private");
383 // Silence the warning about unused variable.
384 (void)IsRegistered;
385 }
386 (void)LoopScope.Privatize();
387}
388
void CodeGenFunction::EmitOMPSimdDirective(const OMPSimdDirective &S) {
  // Pragma 'simd' code depends on presence of 'lastprivate'.
  // If present, we have to separate last iteration of the loop:
  //
  // if (LastIteration != 0) {
  //   for (IV in 0..LastIteration-1) BODY;
  //   BODY with updates of lastprivate vars;
  //   <Final counter/linear vars updates>;
  // }
  //
  // otherwise (when there's no lastprivate):
  //
  // for (IV in 0..LastIteration) BODY;
  // <Final counter/linear vars updates>;
  //

  // Walk clauses and process safelen/lastprivate.
  bool SeparateIter = false;
  LoopStack.setParallel();
  LoopStack.setVectorizerEnable(true);
  for (auto C : S.clauses()) {
    switch (C->getClauseKind()) {
    case OMPC_safelen: {
      RValue Len = EmitAnyExpr(cast<OMPSafelenClause>(C)->getSafelen(),
                               AggValueSlot::ignored(), true);
      llvm::ConstantInt *Val = cast<llvm::ConstantInt>(Len.getScalarVal());
      LoopStack.setVectorizerWidth(Val->getZExtValue());
      // In presence of finite 'safelen', it may be unsafe to mark all
      // the memory instructions parallel, because loop-carried
      // dependences of 'safelen' iterations are possible.
      LoopStack.setParallel(false);
      break;
    }
    case OMPC_aligned:
      EmitOMPAlignedClause(*this, CGM, cast<OMPAlignedClause>(*C));
      break;
    case OMPC_lastprivate:
      // Forces the separated-last-iteration form sketched above.
      SeparateIter = true;
      break;
    default:
      // Not handled yet
      ;
    }
  }

  InlinedOpenMPRegionScopeRAII Region(*this, S);

  // Emit the loop iteration variable.
  const Expr *IVExpr = S.getIterationVariable();
  const VarDecl *IVDecl = cast<VarDecl>(cast<DeclRefExpr>(IVExpr)->getDecl());
  EmitVarDecl(*IVDecl);
  EmitIgnoredExpr(S.getInit());

  // Emit the iterations count variable.
  // If it is not a variable, Sema decided to calculate iterations count on each
  // iteration (e.g., it is foldable into a constant).
  if (auto LIExpr = dyn_cast<DeclRefExpr>(S.getLastIteration())) {
    EmitVarDecl(*cast<VarDecl>(LIExpr->getDecl()));
    // Emit calculation of the iterations count.
    EmitIgnoredExpr(S.getCalcLastIteration());
  }

  if (SeparateIter) {
    // Emit: if (LastIteration > 0) - begin.
    RegionCounter Cnt = getPGORegionCounter(&S);
    auto ThenBlock = createBasicBlock("simd.if.then");
    auto ContBlock = createBasicBlock("simd.if.end");
    EmitBranchOnBoolExpr(S.getPreCond(), ThenBlock, ContBlock, Cnt.getCount());
    EmitBlock(ThenBlock);
    Cnt.beginRegion(Builder);
    // Emit 'then' code.
    {
      OMPPrivateScope LoopScope(*this);
      EmitPrivateLoopCounters(*this, LoopScope, S.counters());
      // Loop over all but the last iteration, then emit the last iteration's
      // body once more outside the loop.
      EmitOMPInnerLoop(S, LoopScope, /* SeparateIter */ true);
      EmitOMPLoopBody(S, /* SeparateIter */ true);
    }
    EmitOMPSimdFinal(S);
    // Emit: if (LastIteration != 0) - end.
    EmitBranch(ContBlock);
    EmitBlock(ContBlock, true);
  } else {
    {
      OMPPrivateScope LoopScope(*this);
      EmitPrivateLoopCounters(*this, LoopScope, S.counters());
      EmitOMPInnerLoop(S, LoopScope);
    }
    EmitOMPSimdFinal(S);
  }
}
479
/// \brief Emits the outer dispatch loop for schedules that need one: the
/// runtime hands out chunks [LB, UB] and the inner loop processes each chunk.
/// Dynamic schedules are reported as unsupported here.
void CodeGenFunction::EmitOMPForOuterLoop(OpenMPScheduleClauseKind ScheduleKind,
                                          const OMPLoopDirective &S,
                                          OMPPrivateScope &LoopScope,
                                          llvm::Value *LB, llvm::Value *UB,
                                          llvm::Value *ST, llvm::Value *IL,
                                          llvm::Value *Chunk) {
  auto &RT = CGM.getOpenMPRuntime();
  assert(!RT.isStaticNonchunked(ScheduleKind, /* Chunked */ Chunk != nullptr) &&
         "static non-chunked schedule does not need outer loop");
  if (RT.isDynamic(ScheduleKind)) {
    ErrorUnsupported(&S, "OpenMP loop with dynamic schedule");
    return;
  }

  // Emit outer loop.
  //
  // OpenMP [2.7.1, Loop Construct, Description, table 2-1]
  // When schedule(static, chunk_size) is specified, iterations are divided into
  // chunks of size chunk_size, and the chunks are assigned to the threads in
  // the team in a round-robin fashion in the order of the thread number.
  //
  // while(UB = min(UB, GlobalUB), idx = LB, idx < UB) {
  //   while (idx <= UB) { BODY; ++idx; } // inner loop
  //   LB = LB + ST;
  //   UB = UB + ST;
  // }
  //
  const Expr *IVExpr = S.getIterationVariable();
  const unsigned IVSize = getContext().getTypeSize(IVExpr->getType());
  const bool IVSigned = IVExpr->getType()->hasSignedIntegerRepresentation();

  // Let the runtime initialize LB/UB/stride/is-last for this thread.
  RT.emitForInit(*this, S.getLocStart(), ScheduleKind, IVSize, IVSigned, IL, LB,
                 UB, ST, Chunk);
  auto LoopExit = getJumpDestInCurrentScope("omp.dispatch.end");

  // Start the loop with a block that tests the condition.
  auto CondBlock = createBasicBlock("omp.dispatch.cond");
  EmitBlock(CondBlock);
  LoopStack.push(CondBlock);

  llvm::Value *BoolCondVal = nullptr;
  // UB = min(UB, GlobalUB)
  EmitIgnoredExpr(S.getEnsureUpperBound());
  // IV = LB
  EmitIgnoredExpr(S.getInit());
  // IV < UB
  BoolCondVal = EvaluateExprAsBool(S.getCond(false));

  // If there are any cleanups between here and the loop-exit scope,
  // create a block to stage a loop exit along.
  auto ExitBlock = LoopExit.getBlock();
  if (LoopScope.requiresCleanups())
    ExitBlock = createBasicBlock("omp.dispatch.cleanup");

  auto LoopBody = createBasicBlock("omp.dispatch.body");
  Builder.CreateCondBr(BoolCondVal, LoopBody, ExitBlock);
  if (ExitBlock != LoopExit.getBlock()) {
    EmitBlock(ExitBlock);
    EmitBranchThroughCleanup(LoopExit);
  }
  EmitBlock(LoopBody);

  // Create a block for the increment.
  auto Continue = getJumpDestInCurrentScope("omp.dispatch.inc");
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  EmitOMPInnerLoop(S, LoopScope);

  EmitBlock(Continue.getBlock());
  BreakContinueStack.pop_back();
  // Emit "LB = LB + Stride", "UB = UB + Stride".
  EmitIgnoredExpr(S.getNextLowerBound());
  EmitIgnoredExpr(S.getNextUpperBound());

  EmitBranch(CondBlock);
  LoopStack.pop();
  // Emit the fall-through block.
  EmitBlock(LoopExit.getBlock());

  // Tell the runtime we are done.
  RT.emitForFinish(*this, S.getLocStart(), ScheduleKind);
}
562
Alexander Musmanc6388682014-12-15 07:07:06 +0000563/// \brief Emit a helper variable and return corresponding lvalue.
564static LValue EmitOMPHelperVar(CodeGenFunction &CGF,
565 const DeclRefExpr *Helper) {
566 auto VDecl = cast<VarDecl>(Helper->getDecl());
567 CGF.EmitVarDecl(*VDecl);
568 return CGF.EmitLValue(Helper);
569}
570
/// \brief Emits a worksharing loop: precondition check, helper-variable
/// setup, schedule detection, then either the simple static-nonchunked
/// lowering or the generic outer dispatch loop.
void CodeGenFunction::EmitOMPWorksharingLoop(const OMPLoopDirective &S) {
  // Emit the loop iteration variable.
  auto IVExpr = cast<DeclRefExpr>(S.getIterationVariable());
  auto IVDecl = cast<VarDecl>(IVExpr->getDecl());
  EmitVarDecl(*IVDecl);

  // Emit the iterations count variable.
  // If it is not a variable, Sema decided to calculate iterations count on each
  // iteration (e.g., it is foldable into a constant).
  if (auto LIExpr = dyn_cast<DeclRefExpr>(S.getLastIteration())) {
    EmitVarDecl(*cast<VarDecl>(LIExpr->getDecl()));
    // Emit calculation of the iterations count.
    EmitIgnoredExpr(S.getCalcLastIteration());
  }

  auto &RT = CGM.getOpenMPRuntime();

  // Check pre-condition.
  {
    // Skip the entire loop if we don't meet the precondition.
    RegionCounter Cnt = getPGORegionCounter(&S);
    auto ThenBlock = createBasicBlock("omp.precond.then");
    auto ContBlock = createBasicBlock("omp.precond.end");
    EmitBranchOnBoolExpr(S.getPreCond(), ThenBlock, ContBlock, Cnt.getCount());
    EmitBlock(ThenBlock);
    Cnt.beginRegion(Builder);
    // Emit 'then' code.
    {
      // Emit helper vars inits (lower/upper bound, stride, is-last flag).
      LValue LB =
          EmitOMPHelperVar(*this, cast<DeclRefExpr>(S.getLowerBoundVariable()));
      LValue UB =
          EmitOMPHelperVar(*this, cast<DeclRefExpr>(S.getUpperBoundVariable()));
      LValue ST =
          EmitOMPHelperVar(*this, cast<DeclRefExpr>(S.getStrideVariable()));
      LValue IL =
          EmitOMPHelperVar(*this, cast<DeclRefExpr>(S.getIsLastIterVariable()));

      OMPPrivateScope LoopScope(*this);
      EmitPrivateLoopCounters(*this, LoopScope, S.counters());

      // Detect the loop schedule kind and chunk.
      auto ScheduleKind = OMPC_SCHEDULE_unknown;
      llvm::Value *Chunk = nullptr;
      if (auto C = cast_or_null<OMPScheduleClause>(
              S.getSingleClause(OMPC_schedule))) {
        ScheduleKind = C->getScheduleKind();
        if (auto Ch = C->getChunkSize()) {
          // The chunk expression is converted to the iteration variable type.
          Chunk = EmitScalarExpr(Ch);
          Chunk = EmitScalarConversion(Chunk, Ch->getType(),
                                       S.getIterationVariable()->getType());
        }
      }
      const unsigned IVSize = getContext().getTypeSize(IVExpr->getType());
      const bool IVSigned = IVExpr->getType()->hasSignedIntegerRepresentation();
      if (RT.isStaticNonchunked(ScheduleKind,
                                /* Chunked */ Chunk != nullptr)) {
        // OpenMP [2.7.1, Loop Construct, Description, table 2-1]
        // When no chunk_size is specified, the iteration space is divided into
        // chunks that are approximately equal in size, and at most one chunk is
        // distributed to each thread. Note that the size of the chunks is
        // unspecified in this case.
        RT.emitForInit(*this, S.getLocStart(), ScheduleKind, IVSize, IVSigned,
                       IL.getAddress(), LB.getAddress(), UB.getAddress(),
                       ST.getAddress());
        // UB = min(UB, GlobalUB);
        EmitIgnoredExpr(S.getEnsureUpperBound());
        // IV = LB;
        EmitIgnoredExpr(S.getInit());
        // while (idx <= UB) { BODY; ++idx; }
        EmitOMPInnerLoop(S, LoopScope);
        // Tell the runtime we are done.
        RT.emitForFinish(*this, S.getLocStart(), ScheduleKind);
      } else {
        // Emit the outer loop, which requests its work chunk [LB..UB] from
        // runtime and runs the inner loop to process it.
        EmitOMPForOuterLoop(ScheduleKind, S, LoopScope, LB.getAddress(),
                            UB.getAddress(), ST.getAddress(), IL.getAddress(),
                            Chunk);
      }
    }
    // We're now done with the loop, so jump to the continuation block.
    EmitBranch(ContBlock);
    EmitBlock(ContBlock, true);
  }
}
657
658void CodeGenFunction::EmitOMPForDirective(const OMPForDirective &S) {
Alexey Bataev36bf0112015-03-10 05:15:26 +0000659 InlinedOpenMPRegionScopeRAII Region(*this, S);
Alexander Musmanc6388682014-12-15 07:07:06 +0000660
661 EmitOMPWorksharingLoop(S);
662
663 // Emit an implicit barrier at the end.
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000664 CGM.getOpenMPRuntime().emitBarrierCall(*this, S.getLocStart(),
665 /*IsExplicit*/ false);
Alexey Bataevf29276e2014-06-18 04:14:57 +0000666}
Alexey Bataevd3f8dd22014-06-25 11:44:49 +0000667
// Placeholder: codegen for 'omp for simd' has not been implemented yet.
void CodeGenFunction::EmitOMPForSimdDirective(const OMPForSimdDirective &) {
  llvm_unreachable("CodeGen for 'omp for simd' is not supported yet.");
}
671
// Placeholder: codegen for 'omp sections' has not been implemented yet.
void CodeGenFunction::EmitOMPSectionsDirective(const OMPSectionsDirective &) {
  llvm_unreachable("CodeGen for 'omp sections' is not supported yet.");
}
675
// Placeholder: codegen for 'omp section' has not been implemented yet.
void CodeGenFunction::EmitOMPSectionDirective(const OMPSectionDirective &) {
  llvm_unreachable("CodeGen for 'omp section' is not supported yet.");
}
679
void CodeGenFunction::EmitOMPSingleDirective(const OMPSingleDirective &S) {
  // Let the OpenMP runtime wrap the region; the callback emits the
  // directive's associated statement as the region body.
  CGM.getOpenMPRuntime().emitSingleRegion(*this, [&]() -> void {
    InlinedOpenMPRegionScopeRAII Region(*this, S);
    EmitStmt(cast<CapturedStmt>(S.getAssociatedStmt())->getCapturedStmt());
    // Make sure there is an insert point for any code emitted afterwards.
    EnsureInsertPoint();
  }, S.getLocStart());
}
687
void CodeGenFunction::EmitOMPMasterDirective(const OMPMasterDirective &S) {
  // Let the OpenMP runtime wrap the region; the callback emits the
  // directive's associated statement as the region body.
  CGM.getOpenMPRuntime().emitMasterRegion(*this, [&]() -> void {
    InlinedOpenMPRegionScopeRAII Region(*this, S);
    EmitStmt(cast<CapturedStmt>(S.getAssociatedStmt())->getCapturedStmt());
    // Make sure there is an insert point for any code emitted afterwards.
    EnsureInsertPoint();
  }, S.getLocStart());
}
695
void CodeGenFunction::EmitOMPCriticalDirective(const OMPCriticalDirective &S) {
  // The directive's name identifies the critical region; the callback emits
  // the associated statement as the region body.
  CGM.getOpenMPRuntime().emitCriticalRegion(
      *this, S.getDirectiveName().getAsString(), [&]() -> void {
        InlinedOpenMPRegionScopeRAII Region(*this, S);
        EmitStmt(cast<CapturedStmt>(S.getAssociatedStmt())->getCapturedStmt());
        // Make sure there is an insert point for any code emitted afterwards.
        EnsureInsertPoint();
      }, S.getLocStart());
}
704
// Placeholder: code generation for the combined 'omp parallel for' directive
// has not been implemented yet; emitting it aborts via llvm_unreachable.
void
CodeGenFunction::EmitOMPParallelForDirective(const OMPParallelForDirective &) {
  llvm_unreachable("CodeGen for 'omp parallel for' is not supported yet.");
}
709
// Placeholder: code generation for the combined 'omp parallel for simd'
// directive has not been implemented yet; emitting it aborts.
void CodeGenFunction::EmitOMPParallelForSimdDirective(
    const OMPParallelForSimdDirective &) {
  llvm_unreachable("CodeGen for 'omp parallel for simd' is not supported yet.");
}
714
// Placeholder: code generation for the combined 'omp parallel sections'
// directive has not been implemented yet; emitting it aborts.
void CodeGenFunction::EmitOMPParallelSectionsDirective(
    const OMPParallelSectionsDirective &) {
  llvm_unreachable("CodeGen for 'omp parallel sections' is not supported yet.");
}
719
// Placeholder: code generation for the 'omp task' directive has not been
// implemented yet; emitting it aborts via llvm_unreachable.
void CodeGenFunction::EmitOMPTaskDirective(const OMPTaskDirective &) {
  llvm_unreachable("CodeGen for 'omp task' is not supported yet.");
}
723
Alexey Bataev9f797f32015-02-05 05:57:51 +0000724void CodeGenFunction::EmitOMPTaskyieldDirective(
725 const OMPTaskyieldDirective &S) {
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000726 CGM.getOpenMPRuntime().emitTaskyieldCall(*this, S.getLocStart());
Alexey Bataev68446b72014-07-18 07:47:19 +0000727}
728
Alexey Bataev8f7c1b02014-12-05 04:09:23 +0000729void CodeGenFunction::EmitOMPBarrierDirective(const OMPBarrierDirective &S) {
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000730 CGM.getOpenMPRuntime().emitBarrierCall(*this, S.getLocStart());
Alexey Bataev4d1dfea2014-07-18 09:11:51 +0000731}
732
// Placeholder: code generation for the 'omp taskwait' directive has not been
// implemented yet; emitting it aborts via llvm_unreachable.
void CodeGenFunction::EmitOMPTaskwaitDirective(const OMPTaskwaitDirective &) {
  llvm_unreachable("CodeGen for 'omp taskwait' is not supported yet.");
}
736
Alexey Bataevcc37cc12014-11-20 04:34:54 +0000737void CodeGenFunction::EmitOMPFlushDirective(const OMPFlushDirective &S) {
Alexey Bataev3eff5f42015-02-25 08:32:46 +0000738 CGM.getOpenMPRuntime().emitFlush(*this, [&]() -> ArrayRef<const Expr *> {
739 if (auto C = S.getSingleClause(/*K*/ OMPC_flush)) {
740 auto FlushClause = cast<OMPFlushClause>(C);
741 return llvm::makeArrayRef(FlushClause->varlist_begin(),
742 FlushClause->varlist_end());
743 }
744 return llvm::None;
745 }(), S.getLocStart());
Alexey Bataev6125da92014-07-21 11:26:11 +0000746}
747
// Placeholder: code generation for the 'omp ordered' directive has not been
// implemented yet; emitting it aborts via llvm_unreachable.
void CodeGenFunction::EmitOMPOrderedDirective(const OMPOrderedDirective &) {
  llvm_unreachable("CodeGen for 'omp ordered' is not supported yet.");
}
751
Alexey Bataevb57056f2015-01-22 06:17:56 +0000752static llvm::Value *convertToScalarValue(CodeGenFunction &CGF, RValue Val,
753 QualType SrcType, QualType DestType) {
754 assert(CGF.hasScalarEvaluationKind(DestType) &&
755 "DestType must have scalar evaluation kind.");
756 assert(!Val.isAggregate() && "Must be a scalar or complex.");
757 return Val.isScalar()
758 ? CGF.EmitScalarConversion(Val.getScalarVal(), SrcType, DestType)
759 : CGF.EmitComplexToScalarConversion(Val.getComplexVal(), SrcType,
760 DestType);
761}
762
763static CodeGenFunction::ComplexPairTy
764convertToComplexValue(CodeGenFunction &CGF, RValue Val, QualType SrcType,
765 QualType DestType) {
766 assert(CGF.getEvaluationKind(DestType) == TEK_Complex &&
767 "DestType must have complex evaluation kind.");
768 CodeGenFunction::ComplexPairTy ComplexVal;
769 if (Val.isScalar()) {
770 // Convert the input element to the element type of the complex.
771 auto DestElementType = DestType->castAs<ComplexType>()->getElementType();
772 auto ScalarVal =
773 CGF.EmitScalarConversion(Val.getScalarVal(), SrcType, DestElementType);
774 ComplexVal = CodeGenFunction::ComplexPairTy(
775 ScalarVal, llvm::Constant::getNullValue(ScalarVal->getType()));
776 } else {
777 assert(Val.isComplex() && "Must be a scalar or complex.");
778 auto SrcElementType = SrcType->castAs<ComplexType>()->getElementType();
779 auto DestElementType = DestType->castAs<ComplexType>()->getElementType();
780 ComplexVal.first = CGF.EmitScalarConversion(
781 Val.getComplexVal().first, SrcElementType, DestElementType);
782 ComplexVal.second = CGF.EmitScalarConversion(
783 Val.getComplexVal().second, SrcElementType, DestElementType);
784 }
785 return ComplexVal;
786}
787
/// Emit 'omp atomic read': atomically load X and store the (converted) value
/// into V. \p IsSeqCst selects sequentially-consistent vs monotonic ordering.
static void EmitOMPAtomicReadExpr(CodeGenFunction &CGF, bool IsSeqCst,
                                  const Expr *X, const Expr *V,
                                  SourceLocation Loc) {
  // v = x;
  assert(V->isLValue() && "V of 'omp atomic read' is not lvalue");
  assert(X->isLValue() && "X of 'omp atomic read' is not lvalue");
  LValue XLValue = CGF.EmitLValue(X);
  LValue VLValue = CGF.EmitLValue(V);
  // Global-register lvalues are loaded with a plain load; everything else
  // goes through an atomic load with the requested ordering.
  RValue Res = XLValue.isGlobalReg()
                   ? CGF.EmitLoadOfLValue(XLValue, Loc)
                   : CGF.EmitAtomicLoad(XLValue, Loc,
                                        IsSeqCst ? llvm::SequentiallyConsistent
                                                 : llvm::Monotonic,
                                        XLValue.isVolatile());
  // OpenMP, 2.12.6, atomic Construct
  // Any atomic construct with a seq_cst clause forces the atomically
  // performed operation to include an implicit flush operation without a
  // list.
  if (IsSeqCst)
    CGF.CGM.getOpenMPRuntime().emitFlush(CGF, llvm::None, Loc);
  // Store into V, converting the loaded value from X's type to V's type.
  // V must evaluate as scalar or complex; aggregates are not allowed here.
  switch (CGF.getEvaluationKind(V->getType())) {
  case TEK_Scalar:
    CGF.EmitStoreOfScalar(
        convertToScalarValue(CGF, Res, X->getType(), V->getType()), VLValue);
    break;
  case TEK_Complex:
    CGF.EmitStoreOfComplex(
        convertToComplexValue(CGF, Res, X->getType(), V->getType()), VLValue,
        /*isInit=*/false);
    break;
  case TEK_Aggregate:
    llvm_unreachable("Must be a scalar or complex.");
  }
}
822
/// Emit 'omp atomic write': atomically store the value of expression E into
/// X. \p IsSeqCst selects sequentially-consistent vs monotonic ordering.
static void EmitOMPAtomicWriteExpr(CodeGenFunction &CGF, bool IsSeqCst,
                                   const Expr *X, const Expr *E,
                                   SourceLocation Loc) {
  // x = expr;
  assert(X->isLValue() && "X of 'omp atomic write' is not lvalue");
  LValue XLValue = CGF.EmitLValue(X);
  RValue ExprRValue = CGF.EmitAnyExpr(E);
  // Global-register lvalues get a plain register store; everything else goes
  // through an atomic store with the requested ordering.
  if (XLValue.isGlobalReg())
    CGF.EmitStoreThroughGlobalRegLValue(ExprRValue, XLValue);
  else
    CGF.EmitAtomicStore(ExprRValue, XLValue,
                        IsSeqCst ? llvm::SequentiallyConsistent
                                 : llvm::Monotonic,
                        XLValue.isVolatile(), /*IsInit=*/false);
  // OpenMP, 2.12.6, atomic Construct
  // Any atomic construct with a seq_cst clause forces the atomically
  // performed operation to include an implicit flush operation without a
  // list.
  if (IsSeqCst)
    CGF.CGM.getOpenMPRuntime().emitFlush(CGF, llvm::None, Loc);
}
844
/// Dispatch 'omp atomic' emission on the clause kind that selects the atomic
/// form: read, write (implemented), update/capture (not yet supported). Any
/// other clause kind is invalid on 'omp atomic' and is unreachable here.
static void EmitOMPAtomicExpr(CodeGenFunction &CGF, OpenMPClauseKind Kind,
                              bool IsSeqCst, const Expr *X, const Expr *V,
                              const Expr *E, SourceLocation Loc) {
  switch (Kind) {
  case OMPC_read:
    EmitOMPAtomicReadExpr(CGF, IsSeqCst, X, V, Loc);
    break;
  case OMPC_write:
    EmitOMPAtomicWriteExpr(CGF, IsSeqCst, X, E, Loc);
    break;
  case OMPC_update:
  case OMPC_capture:
    llvm_unreachable("CodeGen for 'omp atomic clause' is not supported yet.");
  // None of the remaining clause kinds may select an atomic form; Sema is
  // expected to have rejected them before codegen.
  case OMPC_if:
  case OMPC_final:
  case OMPC_num_threads:
  case OMPC_private:
  case OMPC_firstprivate:
  case OMPC_lastprivate:
  case OMPC_reduction:
  case OMPC_safelen:
  case OMPC_collapse:
  case OMPC_default:
  case OMPC_seq_cst:
  case OMPC_shared:
  case OMPC_linear:
  case OMPC_aligned:
  case OMPC_copyin:
  case OMPC_copyprivate:
  case OMPC_flush:
  case OMPC_proc_bind:
  case OMPC_schedule:
  case OMPC_ordered:
  case OMPC_nowait:
  case OMPC_untied:
  case OMPC_threadprivate:
  case OMPC_mergeable:
  case OMPC_unknown:
    llvm_unreachable("Clause is not allowed in 'omp atomic'.");
  }
}
886
887void CodeGenFunction::EmitOMPAtomicDirective(const OMPAtomicDirective &S) {
888 bool IsSeqCst = S.getSingleClause(/*K=*/OMPC_seq_cst);
889 OpenMPClauseKind Kind = OMPC_unknown;
890 for (auto *C : S.clauses()) {
891 // Find first clause (skip seq_cst clause, if it is first).
892 if (C->getClauseKind() != OMPC_seq_cst) {
893 Kind = C->getClauseKind();
894 break;
895 }
896 }
Alexey Bataev36bf0112015-03-10 05:15:26 +0000897 InlinedOpenMPRegionScopeRAII Region(*this, S);
Alexey Bataevb57056f2015-01-22 06:17:56 +0000898 EmitOMPAtomicExpr(*this, Kind, IsSeqCst, S.getX(), S.getV(), S.getExpr(),
899 S.getLocStart());
Alexey Bataev0162e452014-07-22 10:10:35 +0000900}
901
// Placeholder: code generation for the 'omp target' directive has not been
// implemented yet; emitting it aborts via llvm_unreachable.
void CodeGenFunction::EmitOMPTargetDirective(const OMPTargetDirective &) {
  llvm_unreachable("CodeGen for 'omp target' is not supported yet.");
}
905
// Placeholder: code generation for the 'omp teams' directive has not been
// implemented yet; emitting it aborts via llvm_unreachable.
void CodeGenFunction::EmitOMPTeamsDirective(const OMPTeamsDirective &) {
  llvm_unreachable("CodeGen for 'omp teams' is not supported yet.");
}
909