[OpenMP] Sema and parsing for 'teams distribute' pragma
This patch implements parsing and semantic analysis (Sema) for the OpenMP 'teams distribute' combined pragma.
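For reference, a minimal use of the construct this patch enables (the example is illustrative and not part of the patch; per OpenMP 4.5 the 'teams' part must be closely nested in a 'target' region):

  void saxpy(int n, float a, float *x, float *y) {
  #pragma omp target map(to: x[0:n]) map(tofrom: y[0:n])
  #pragma omp teams distribute
    for (int i = 0; i < n; ++i)
      y[i] = a * x[i] + y[i];
  }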
Differential Revision: https://reviews.llvm.org/D23189
llvm-svn: 277818
diff --git a/clang/lib/Sema/SemaOpenMP.cpp b/clang/lib/Sema/SemaOpenMP.cpp
index 79c4d1c..8194e16 100644
--- a/clang/lib/Sema/SemaOpenMP.cpp
+++ b/clang/lib/Sema/SemaOpenMP.cpp
@@ -1694,7 +1694,8 @@
}
case OMPD_distribute_parallel_for_simd:
case OMPD_distribute_simd:
- case OMPD_distribute_parallel_for: {
+ case OMPD_distribute_parallel_for:
+ case OMPD_teams_distribute: {
QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1);
QualType KmpInt32PtrTy =
Context.getPointerType(KmpInt32Ty).withConst().withRestrict();
@@ -1922,6 +1923,7 @@
// | |parallel for simd| |
// | parallel | distribute simd | + |
// | parallel | target simd | * |
+ // | parallel | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | for | parallel | * |
// | for | for | + |
@@ -1966,6 +1968,7 @@
// | for | target parallel | + |
// | | for simd | |
// | for | target simd | * |
+ // | for | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | master | parallel | * |
// | master | for | + |
@@ -2010,6 +2013,7 @@
// | master | target parallel | + |
// | | for simd | |
// | master | target simd | * |
+ // | master | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | critical | parallel | * |
// | critical | for | + |
@@ -2053,6 +2057,7 @@
// | critical | target parallel | + |
// | | for simd | |
// | critical | target simd | * |
+ // | critical | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | simd | parallel | |
// | simd | for | |
@@ -2097,6 +2102,7 @@
// | simd | target parallel | |
// | | for simd | |
// | simd | target simd | |
+ // | simd | teams distribute| |
// +------------------+-----------------+------------------------------------+
// | for simd | parallel | |
// | for simd | for | |
@@ -2141,6 +2147,7 @@
// | for simd | target parallel | |
// | | for simd | |
// | for simd | target simd | |
+ // | for simd | teams distribute| |
// +------------------+-----------------+------------------------------------+
// | parallel for simd| parallel | |
// | parallel for simd| for | |
@@ -2184,6 +2191,7 @@
// | parallel for simd| distribute simd | |
// | | for simd | |
// | parallel for simd| target simd | |
+ // | parallel for simd| teams distribute| |
// +------------------+-----------------+------------------------------------+
// | sections | parallel | * |
// | sections | for | + |
@@ -2272,6 +2280,7 @@
// | section | target parallel | + |
// | | for simd | |
// | section | target simd | * |
+ // | section | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | single | parallel | * |
// | single | for | + |
@@ -2316,6 +2325,7 @@
// | single | target parallel | + |
// | | for simd | |
// | single | target simd | * |
+ // | single | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | parallel for | parallel | * |
// | parallel for | for | + |
@@ -2360,6 +2370,7 @@
// | parallel for | target parallel | + |
// | | for simd | |
// | parallel for | target simd | * |
+ // | parallel for | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | parallel sections| parallel | * |
// | parallel sections| for | + |
@@ -2404,6 +2415,7 @@
// | parallel sections| target parallel | + |
// | | for simd | |
// | parallel sections| target simd | * |
+ // | parallel sections| teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | task | parallel | * |
// | task | for | + |
@@ -2448,6 +2460,7 @@
// | task | target parallel | + |
// | | for simd | |
// | task | target simd | * |
+ // | task | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | ordered | parallel | * |
// | ordered | for | + |
@@ -2492,6 +2505,7 @@
// | ordered | target parallel | + |
// | | for simd | |
// | ordered | target simd | * |
+ // | ordered | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | atomic | parallel | |
// | atomic | for | |
@@ -2536,6 +2550,7 @@
// | atomic | target parallel | |
// | | for simd | |
// | atomic | target simd | |
+ // | atomic | teams distribute| |
// +------------------+-----------------+------------------------------------+
// | target | parallel | * |
// | target | for | * |
@@ -2580,6 +2595,7 @@
// | target | target parallel | |
// | | for simd | |
// | target | target simd | |
+ // | target | teams distribute| |
// +------------------+-----------------+------------------------------------+
// | target parallel | parallel | * |
// | target parallel | for | * |
@@ -2624,6 +2640,7 @@
// | target parallel | target parallel | |
// | | for simd | |
// | target parallel | target simd | |
+ // | target parallel | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | target parallel | parallel | * |
// | for | | |
@@ -2697,6 +2714,8 @@
// | for | for simd | |
// | target parallel | target simd | |
// | for | | |
+ // | target parallel | teams distribute| |
+ // | for | | |
// +------------------+-----------------+------------------------------------+
// | teams | parallel | * |
// | teams | for | + |
@@ -2741,6 +2760,7 @@
// | teams | target parallel | + |
// | | for simd | |
// | teams | target simd | + |
+ // | teams | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | taskloop | parallel | * |
// | taskloop | for | + |
@@ -2784,6 +2804,7 @@
// | taskloop | target parallel | * |
// | | for simd | |
// | taskloop | target simd | * |
+ // | taskloop | teams distribute| + |
// +------------------+-----------------+------------------------------------+
// | taskloop simd | parallel | |
// | taskloop simd | for | |
@@ -2828,6 +2849,7 @@
// | taskloop simd | target parallel | |
// | | for simd | |
// | taskloop simd | target simd | |
+ // | taskloop simd | teams distribute| |
// +------------------+-----------------+------------------------------------+
// | distribute | parallel | * |
// | distribute | for | * |
@@ -2872,6 +2894,7 @@
// | distribute | target parallel | |
// | | for simd | |
// | distribute | target simd | |
+ // | distribute | teams distribute| |
// +------------------+-----------------+------------------------------------+
// | distribute | parallel | * |
// | parallel for | | |
@@ -2946,6 +2969,8 @@
// | parallel for | for simd | |
// | distribute | target simd | |
// | parallel for | | |
+ // | distribute | teams distribute| |
+ // | parallel for | | |
// +------------------+-----------------+------------------------------------+
// | distribute | parallel | * |
// | parallel for simd| | |
@@ -3019,6 +3044,8 @@
// | parallel for simd| for simd | |
// | distribute | target simd | |
// | parallel for simd| | |
+ // | distribute | teams distribute| |
+ // | parallel for simd| | |
// +------------------+-----------------+------------------------------------+
// | distribute simd | parallel | * |
// | distribute simd | for | * |
@@ -3063,6 +3090,7 @@
// | distribute simd | target parallel | * |
// | | for simd | |
// | distribute simd | target simd | * |
+ // | distribute simd | teams distribute| * |
// +------------------+-----------------+------------------------------------+
// | target parallel | parallel | * |
// | for simd | | |
@@ -3136,6 +3164,8 @@
// | for simd | for simd | |
// | target parallel | target simd | * |
// | for simd | | |
+ // | target parallel | teams distribute| * |
+ // | for simd | | |
// +------------------+-----------------+------------------------------------+
// | target simd | parallel | |
// | target simd | for | |
@@ -3180,6 +3210,51 @@
// | target simd | target parallel | |
// | | for simd | |
// | target simd | target simd | |
+ // | target simd | teams distribute| |
+ // +------------------+-----------------+------------------------------------+
+ // | teams distribute | parallel | |
+ // | teams distribute | for | |
+ // | teams distribute | for simd | |
+ // | teams distribute | master | |
+ // | teams distribute | critical | |
+ // | teams distribute | simd | |
+ // | teams distribute | sections | |
+ // | teams distribute | section | |
+ // | teams distribute | single | |
+ // | teams distribute | parallel for | |
+ // | teams distribute |parallel for simd| |
+ // | teams distribute |parallel sections| |
+ // | teams distribute | task | |
+ // | teams distribute | taskyield | |
+ // | teams distribute | barrier | |
+ // | teams distribute | taskwait | |
+ // | teams distribute | taskgroup | |
+ // | teams distribute | flush | |
+ // | teams distribute | ordered | + (with simd clause) |
+ // | teams distribute | atomic | |
+ // | teams distribute | target | |
+ // | teams distribute | target parallel | |
+ // | teams distribute | target parallel | |
+ // | | for | |
+ // | teams distribute | target enter | |
+ // | | data | |
+ // | teams distribute | target exit | |
+ // | | data | |
+ // | teams distribute | teams | |
+ // | teams distribute | cancellation | |
+ // | | point | |
+ // | teams distribute | cancel | |
+ // | teams distribute | taskloop | |
+ // | teams distribute | taskloop simd | |
+ // | teams distribute | distribute | |
+ // | teams distribute | distribute | |
+ // | | parallel for | |
+ // | teams distribute | distribute | |
+ // | |parallel for simd| |
+ // | teams distribute | distribute simd | |
+ // | teams distribute | target parallel | |
+ // | | for simd | |
+ // | teams distribute | teams distribute| |
// +------------------+-----------------+------------------------------------+
if (Stack->getCurScope()) {
auto ParentRegion = Stack->getParentDirective();
@@ -3335,7 +3410,7 @@
Recommend = ShouldBeInTargetRegion;
Stack->setParentTeamsRegionLoc(Stack->getConstructLoc());
}
- if (!NestingProhibited && isOpenMPTeamsDirective(ParentRegion)) {
+ if (!NestingProhibited && ParentRegion == OMPD_teams) {
// OpenMP [2.16, Nesting of Regions]
// distribute, parallel, parallel sections, parallel workshare, and the
// parallel loop and parallel loop SIMD constructs are the only OpenMP
@@ -3344,11 +3419,12 @@
!isOpenMPDistributeDirective(CurrentRegion);
Recommend = ShouldBeInParallelRegion;
}
- if (!NestingProhibited && isOpenMPDistributeDirective(CurrentRegion)) {
+ if (!NestingProhibited &&
+ isOpenMPNestingDistributeDirective(CurrentRegion)) {
// OpenMP 4.5 [2.17 Nesting of Regions]
// The region associated with the distribute construct must be strictly
// nested inside a teams region
- NestingProhibited = !isOpenMPTeamsDirective(ParentRegion);
+ NestingProhibited = ParentRegion != OMPD_teams;
Recommend = ShouldBeInTeamsRegion;
}
if (!NestingProhibited &&
@@ -3698,6 +3774,11 @@
EndLoc, VarsWithInheritedDSA);
AllowedNameModifiers.push_back(OMPD_target);
break;
+ case OMPD_teams_distribute:
+ Res = ActOnOpenMPTeamsDistributeDirective(ClausesWithImplicit, AStmt,
+ StartLoc, EndLoc,
+ VarsWithInheritedDSA);
+ break;
case OMPD_declare_target:
case OMPD_end_declare_target:
case OMPD_threadprivate:
@@ -7350,6 +7431,40 @@
NestedLoopCount, Clauses, AStmt, B);
}
+StmtResult Sema::ActOnOpenMPTeamsDistributeDirective(
+ ArrayRef<OMPClause *> Clauses, Stmt *AStmt, SourceLocation StartLoc,
+ SourceLocation EndLoc,
+ llvm::DenseMap<ValueDecl *, Expr *> &VarsWithImplicitDSA) {
+ if (!AStmt)
+ return StmtError();
+
+ CapturedStmt *CS = cast<CapturedStmt>(AStmt);
+ // 1.2.2 OpenMP Language Terminology
+ // Structured block - An executable statement with a single entry at the
+ // top and a single exit at the bottom.
+ // The point of exit cannot be a branch out of the structured block.
+ // longjmp() and throw() must not violate the entry/exit criteria.
+ CS->getCapturedDecl()->setNothrow();
+
+ OMPLoopDirective::HelperExprs B;
+ // If the 'collapse' clause is present, its argument defines the
+ // number of associated (nested) loops.
+ unsigned NestedLoopCount =
+ CheckOpenMPLoop(OMPD_teams_distribute, getCollapseNumberExpr(Clauses),
+ nullptr /*ordered not a clause on distribute*/, AStmt,
+ *this, *DSAStack, VarsWithImplicitDSA, B);
+ if (NestedLoopCount == 0)
+ return StmtError();
+
+ assert((CurContext->isDependentContext() || B.builtAll()) &&
+ "omp teams distribute loop exprs were not built");
+
+ getCurFunction()->setHasBranchProtectedScope();
+ return OMPTeamsDistributeDirective::Create(Context, StartLoc, EndLoc,
+ NestedLoopCount, Clauses, AStmt,
+ B);
+}
+
OMPClause *Sema::ActOnOpenMPSingleExprClause(OpenMPClauseKind Kind, Expr *Expr,
SourceLocation StartLoc,
SourceLocation LParenLoc,
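As a sketch of the nesting rule the updated check enforces (the code below is illustrative, not from the patch): by switching from isOpenMPTeamsDirective/isOpenMPDistributeDirective to an exact OMPD_teams comparison and isOpenMPNestingDistributeDirective, a standalone 'distribute' is accepted only when its parent region is a plain 'teams' region, not a combined teams directive such as 'teams distribute' (whose distribute part is already implicit):

  void ok(int n, float *a) {
  #pragma omp target
  #pragma omp teams
  #pragma omp distribute /* OK: strictly nested in a plain 'teams' region */
    for (int i = 0; i < n; ++i)
      a[i] = 0.0f;
  }

  void bad(int n, float *a) {
  #pragma omp target
  #pragma omp teams distribute
    for (int i = 0; i < n; ++i) {
  #pragma omp distribute /* error: parent is 'teams distribute', not 'teams' */
      for (int j = 0; j < n; ++j)
        a[j] = 1.0f;
    }
  }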