// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp -x c++ -emit-llvm %s -o - -femit-all-decls | FileCheck %s
// RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - -femit-all-decls | FileCheck %s

// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp-simd -x c++ -emit-llvm %s -o - -femit-all-decls | FileCheck --check-prefix SIMD-ONLY0 %s
// RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp-simd -x c++ -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - -femit-all-decls | FileCheck --check-prefix SIMD-ONLY0 %s
// SIMD-ONLY0-NOT: {{__kmpc|__tgt}}
// expected-no-diagnostics
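//
// Codegen test for the combined '#pragma omp parallel master taskloop'
// directive. The parallel region is outlined, the taskloop setup runs under an
// __kmpc_master guard, and clause values (priority, grainsize, num_tasks, if,
// nogroup) are forwarded to the __kmpc_omp_task_alloc/__kmpc_taskloop runtime
// calls matched by the FileCheck lines below.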
#ifndef HEADER
#define HEADER

// CHECK-LABEL: @main
int main(int argc, char **argv) {
// CHECK: [[GTID:%.+]] = call i32 @__kmpc_global_thread_num(%struct.ident_t* [[DEFLOC:@.+]])
// CHECK: call void (%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call(%struct.ident_t* [[DEFLOC]], i32 0, void (i32*, i32*, ...)* bitcast (void (i32*, i32*)* [[OMP_OUTLINED1:@.+]] to void (i32*, i32*, ...)*))
// CHECK: call void (%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call(%struct.ident_t* [[DEFLOC]], i32 1, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, i64)* [[OMP_OUTLINED2:@.+]] to void (i32*, i32*, ...)*), i64 [[GRAINSIZE:%.+]])
// CHECK: call void (%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call(%struct.ident_t* [[DEFLOC]], i32 3, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, i32*, i8***, i64)* [[OMP_OUTLINED3:@.+]] to void (i32*, i32*, ...)*), i32* [[ARGC:%.+]], i8*** [[ARGV:%.+]], i64 [[COND:%.+]])
// CHECK: call void @__kmpc_serialized_parallel(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK: call void [[OMP_OUTLINED3]](i32* %{{.+}}, i32* %{{.+}}, i32* [[ARGC]], i8*** [[ARGV]], i64 [[COND]])
// CHECK: call void @__kmpc_end_serialized_parallel(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])

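// Outlined parallel region for the 'priority(4)' taskloop below: only the
// thread that wins __kmpc_master allocates the task (flags 33, i.e. a tied
// task with a priority specified) and schedules it inside an explicit
// taskgroup; the __kmpc_taskloop call uses scheduling kind 0, so no
// grainsize/num_tasks value is passed.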
// CHECK: define internal void [[OMP_OUTLINED1]](i32* noalias %{{.+}}, i32* noalias %{{.+}})
// CHECK: [[RES:%.+]] = call {{.*}}i32 @__kmpc_master(%struct.ident_t* [[DEFLOC]], i32 [[GTID:%.+]])
// CHECK-NEXT: [[IS_MASTER:%.+]] = icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1 [[IS_MASTER]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
// CHECK: [[THEN]]
// CHECK: call void @__kmpc_taskgroup(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK: [[TASKV:%.+]] = call i8* @__kmpc_omp_task_alloc(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i32 33, i64 80, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[TDP_TY:%.+]]*)* [[TASK1:@.+]] to i32 (i32, i8*)*))
// CHECK: [[TASK:%.+]] = bitcast i8* [[TASKV]] to [[TDP_TY]]*
// CHECK: [[TASK_DATA:%.+]] = getelementptr inbounds [[TDP_TY]], [[TDP_TY]]* [[TASK]], i32 0, i32 0
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 5
// CHECK: store i64 0, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 6
// CHECK: store i64 9, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 7
// CHECK: store i64 1, i64* [[ST]],
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: call void @__kmpc_taskloop(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i8* [[TASKV]], i32 1, i64* [[DOWN]], i64* [[UP]], i64 [[ST_VAL]], i32 1, i32 0, i64 0, i8* null)
// CHECK: call void @__kmpc_end_taskgroup(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK-NEXT: call {{.*}}void @__kmpc_end_master(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK-NEXT: br label {{%?}}[[EXIT]]
// CHECK: [[EXIT]]

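// Task entry routine for the first taskloop: the lower bound, upper bound,
// stride, and last-iteration flag are read from the task descriptor and drive
// the generated loop over the chunk assigned to this task.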
// CHECK: define internal i32 [[TASK1]](
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* %{{.+}}, i32 0, i32 5
// CHECK: [[DOWN_VAL:%.+]] = load i64, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 6
// CHECK: [[UP_VAL:%.+]] = load i64, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 7
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: [[LITER:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 8
// CHECK: [[LITER_VAL:%.+]] = load i32, i32* [[LITER]],
// CHECK: store i64 [[DOWN_VAL]], i64* [[LB:%[^,]+]],
// CHECK: store i64 [[UP_VAL]], i64* [[UB:%[^,]+]],
// CHECK: store i64 [[ST_VAL]], i64* [[ST:%[^,]+]],
// CHECK: store i32 [[LITER_VAL]], i32* [[LITER:%[^,]+]],
// CHECK: [[LB_VAL:%.+]] = load i64, i64* [[LB]],
// CHECK: [[LB_I32:%.+]] = trunc i64 [[LB_VAL]] to i32
// CHECK: store i32 [[LB_I32]], i32* [[CNT:%.+]],
// CHECK: br label
// CHECK: [[VAL:%.+]] = load i32, i32* [[CNT]],
// CHECK: [[VAL_I64:%.+]] = sext i32 [[VAL]] to i64
// CHECK: [[UB_VAL:%.+]] = load i64, i64* [[UB]],
// CHECK: [[CMP:%.+]] = icmp ule i64 [[VAL_I64]], [[UB_VAL]]
// CHECK: br i1 [[CMP]], label %{{.+}}, label %{{.+}}
// CHECK: load i32, i32* %
// CHECK: store i32 %
// CHECK: load i32, i32* %
// CHECK: add nsw i32 %{{.+}}, 1
// CHECK: store i32 %{{.+}}, i32* %
// CHECK: br label %
// CHECK: ret i32 0

#pragma omp parallel master taskloop priority(4)
  for (int i = 0; i < 10; ++i)
    ;
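// Outlined parallel region for the 'grainsize(argc) nogroup' taskloop below:
// the grainsize value is captured and passed into the outlined function as an
// i64, no taskgroup calls are emitted because of 'nogroup', and
// __kmpc_taskloop is invoked with scheduling kind 1 (grainsize) and the
// zero-extended grainsize value.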
// CHECK: define internal void [[OMP_OUTLINED2]](i32* noalias %{{.+}}, i32* noalias %{{.+}}, i64 %{{.+}})
// CHECK: [[RES:%.+]] = call {{.*}}i32 @__kmpc_master(%struct.ident_t* [[DEFLOC]], i32 [[GTID:%.+]])
// CHECK-NEXT: [[IS_MASTER:%.+]] = icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1 [[IS_MASTER]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
// CHECK: [[THEN]]
// CHECK: [[TASKV:%.+]] = call i8* @__kmpc_omp_task_alloc(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i32 1, i64 80, i64 1, i32 (i32, i8*)* bitcast (i32 (i32, [[TDP_TY:%.+]]*)* [[TASK2:@.+]] to i32 (i32, i8*)*))
// CHECK: [[TASK:%.+]] = bitcast i8* [[TASKV]] to [[TDP_TY]]*
// CHECK: [[TASK_DATA:%.+]] = getelementptr inbounds [[TDP_TY]], [[TDP_TY]]* [[TASK]], i32 0, i32 0
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 5
// CHECK: store i64 0, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 6
// CHECK: store i64 9, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 7
// CHECK: store i64 1, i64* [[ST]],
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: [[GRAINSIZE:%.+]] = zext i32 %{{.+}} to i64
// CHECK: call void @__kmpc_taskloop(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i8* [[TASKV]], i32 1, i64* [[DOWN]], i64* [[UP]], i64 [[ST_VAL]], i32 1, i32 1, i64 [[GRAINSIZE]], i8* null)
// CHECK-NEXT: call {{.*}}void @__kmpc_end_master(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK-NEXT: br label {{%?}}[[EXIT]]
// CHECK: [[EXIT]]


// CHECK: define internal i32 [[TASK2]](
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* %{{.+}}, i32 0, i32 5
// CHECK: [[DOWN_VAL:%.+]] = load i64, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 6
// CHECK: [[UP_VAL:%.+]] = load i64, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 7
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: [[LITER:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 8
// CHECK: [[LITER_VAL:%.+]] = load i32, i32* [[LITER]],
// CHECK: store i64 [[DOWN_VAL]], i64* [[LB:%[^,]+]],
// CHECK: store i64 [[UP_VAL]], i64* [[UB:%[^,]+]],
// CHECK: store i64 [[ST_VAL]], i64* [[ST:%[^,]+]],
// CHECK: store i32 [[LITER_VAL]], i32* [[LITER:%[^,]+]],
// CHECK: [[LB_VAL:%.+]] = load i64, i64* [[LB]],
// CHECK: [[LB_I32:%.+]] = trunc i64 [[LB_VAL]] to i32
// CHECK: store i32 [[LB_I32]], i32* [[CNT:%.+]],
// CHECK: br label
// CHECK: [[VAL:%.+]] = load i32, i32* [[CNT]],
// CHECK: [[VAL_I64:%.+]] = sext i32 [[VAL]] to i64
// CHECK: [[UB_VAL:%.+]] = load i64, i64* [[UB]],
// CHECK: [[CMP:%.+]] = icmp ule i64 [[VAL_I64]], [[UB_VAL]]
// CHECK: br i1 [[CMP]], label %{{.+}}, label %{{.+}}
// CHECK: load i32, i32* %
// CHECK: store i32 %
// CHECK: load i32, i32* %
// CHECK: add nsw i32 %{{.+}}, 1
// CHECK: store i32 %{{.+}}, i32* %
// CHECK: br label %
// CHECK: ret i32 0

#pragma omp parallel master taskloop nogroup grainsize(argc)
  for (int i = 0; i < 10; ++i)
    ;
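// Outlined parallel region for the 'if(argc) shared(argc, argv) collapse(2)
// num_tasks(4)' taskloop below: argc and argv are passed by reference, the
// evaluated if-condition is forwarded as the if_val argument of
// __kmpc_taskloop, collapse(2) produces a 64-bit iteration space with a
// computed upper bound, and num_tasks(4) maps to scheduling kind 2 with
// value 4.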
// CHECK: define internal void [[OMP_OUTLINED3]](i32* noalias %{{.+}}, i32* noalias %{{.+}}, i32* dereferenceable(4) %{{.+}}, i8*** dereferenceable(8) %{{.+}}, i64 %{{.+}})
// CHECK: [[RES:%.+]] = call {{.*}}i32 @__kmpc_master(%struct.ident_t* [[DEFLOC]], i32 [[GTID:%.+]])
// CHECK-NEXT: [[IS_MASTER:%.+]] = icmp ne i32 [[RES]], 0
// CHECK-NEXT: br i1 [[IS_MASTER]], label {{%?}}[[THEN:.+]], label {{%?}}[[EXIT:.+]]
// CHECK: [[THEN]]
// CHECK: call void @__kmpc_taskgroup(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK: [[TASKV:%.+]] = call i8* @__kmpc_omp_task_alloc(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i32 1, i64 80, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[TDP_TY:%.+]]*)* [[TASK3:@.+]] to i32 (i32, i8*)*))
// CHECK: [[TASK:%.+]] = bitcast i8* [[TASKV]] to [[TDP_TY]]*
// CHECK: [[TASK_DATA:%.+]] = getelementptr inbounds [[TDP_TY]], [[TDP_TY]]* [[TASK]], i32 0, i32 0
// CHECK: [[COND_VAL:%.+]] = load i8, i8* %{{.+}},
// CHECK: [[COND_BOOL:%.+]] = trunc i8 [[COND_VAL]] to i1
// CHECK: [[IF_INT:%.+]] = sext i1 [[COND_BOOL]] to i32
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 5
// CHECK: store i64 0, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 6
// CHECK: store i64 %{{.+}}, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 7
// CHECK: store i64 1, i64* [[ST]],
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: call void @__kmpc_taskloop(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i8* [[TASKV]], i32 [[IF_INT]], i64* [[DOWN]], i64* [[UP]], i64 [[ST_VAL]], i32 1, i32 2, i64 4, i8* null)
// CHECK: call void @__kmpc_end_taskgroup(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK-NEXT: call {{.*}}void @__kmpc_end_master(%struct.ident_t* [[DEFLOC]], i32 [[GTID]])
// CHECK-NEXT: br label {{%?}}[[EXIT]]
// CHECK: [[EXIT]]

// CHECK: define internal i32 [[TASK3]](
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* %{{.+}}, i32 0, i32 5
// CHECK: [[DOWN_VAL:%.+]] = load i64, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 6
// CHECK: [[UP_VAL:%.+]] = load i64, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 7
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: [[LITER:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 8
// CHECK: [[LITER_VAL:%.+]] = load i32, i32* [[LITER]],
// CHECK: store i64 [[DOWN_VAL]], i64* [[LB:%[^,]+]],
// CHECK: store i64 [[UP_VAL]], i64* [[UB:%[^,]+]],
// CHECK: store i64 [[ST_VAL]], i64* [[ST:%[^,]+]],
// CHECK: store i32 [[LITER_VAL]], i32* [[LITER:%[^,]+]],
// CHECK: [[LB_VAL:%.+]] = load i64, i64* [[LB]],
// CHECK: store i64 [[LB_VAL]], i64* [[CNT:%.+]],
// CHECK: br label
// CHECK: ret i32 0

  int i;
#pragma omp parallel master taskloop if(argc) shared(argc, argv) collapse(2) num_tasks(4)
  for (i = 0; i < argc; ++i)
    for (int j = argc; j < argv[argc][argc]; ++j)
      ;
}

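// Same directive used inside a constructor: S::S(int) captures 'this' and the
// parameter 'c' for the outlined parallel region, loops over the data member
// 'a', and num_tasks(4) again maps to scheduling kind 2 with value 4 in the
// __kmpc_taskloop call.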
// CHECK-LABEL: @_ZN1SC2Ei
struct S {
  int a;
  S(int c) {
// CHECK: call void (%struct.ident_t*, i32, void (i32*, i32*, ...)*, ...) @__kmpc_fork_call(%struct.ident_t* [[DEFLOC]], i32 2, void (i32*, i32*, ...)* bitcast (void (i32*, i32*, %struct.S*, i32*)* [[OMP_OUTLINED4:@.+]] to void (i32*, i32*, ...)*), %struct.S* %{{.+}}, i32* %{{.+}})

// CHECK: define internal void [[OMP_OUTLINED4]](i32* noalias %{{.+}}, i32* noalias %{{.+}}, %struct.S* %{{.+}}, i32* dereferenceable(4) %{{.+}})
// CHECK: [[TASKV:%.+]] = call i8* @__kmpc_omp_task_alloc(%struct.ident_t* [[DEFLOC]], i32 [[GTID:%.+]], i32 1, i64 80, i64 16, i32 (i32, i8*)* bitcast (i32 (i32, [[TDP_TY:%.+]]*)* [[TASK4:@.+]] to i32 (i32, i8*)*))
// CHECK: [[TASK:%.+]] = bitcast i8* [[TASKV]] to [[TDP_TY]]*
// CHECK: [[TASK_DATA:%.+]] = getelementptr inbounds [[TDP_TY]], [[TDP_TY]]* [[TASK]], i32 0, i32 0
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 5
// CHECK: store i64 0, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 6
// CHECK: store i64 %{{.+}}, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* [[TASK_DATA]], i32 0, i32 7
// CHECK: store i64 1, i64* [[ST]],
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: call void @__kmpc_taskloop(%struct.ident_t* [[DEFLOC]], i32 [[GTID]], i8* [[TASKV]], i32 1, i64* [[DOWN]], i64* [[UP]], i64 [[ST_VAL]], i32 1, i32 2, i64 4, i8* null)
#pragma omp parallel master taskloop shared(c) num_tasks(4)
    for (a = 0; a < c; ++a)
      ;
  }
} s(1);

// CHECK: define internal i32 [[TASK4]](
// CHECK: [[DOWN:%.+]] = getelementptr inbounds [[TD_TY:%.+]], [[TD_TY]]* %{{.+}}, i32 0, i32 5
// CHECK: [[DOWN_VAL:%.+]] = load i64, i64* [[DOWN]],
// CHECK: [[UP:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 6
// CHECK: [[UP_VAL:%.+]] = load i64, i64* [[UP]],
// CHECK: [[ST:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 7
// CHECK: [[ST_VAL:%.+]] = load i64, i64* [[ST]],
// CHECK: [[LITER:%.+]] = getelementptr inbounds [[TD_TY]], [[TD_TY]]* %{{.+}}, i32 0, i32 8
// CHECK: [[LITER_VAL:%.+]] = load i32, i32* [[LITER]],
// CHECK: store i64 [[DOWN_VAL]], i64* [[LB:%[^,]+]],
// CHECK: store i64 [[UP_VAL]], i64* [[UB:%[^,]+]],
// CHECK: store i64 [[ST_VAL]], i64* [[ST:%[^,]+]],
// CHECK: store i32 [[LITER_VAL]], i32* [[LITER:%[^,]+]],
// CHECK: [[LB_VAL:%.+]] = load i64, i64* [[LB]],
// CHECK: [[LB_I32:%.+]] = trunc i64 [[LB_VAL]] to i32
// CHECK: store i32 [[LB_I32]], i32* [[CNT:%.+]],
// CHECK: br label
// CHECK: [[VAL:%.+]] = load i32, i32* [[CNT]],
// CHECK: [[VAL_I64:%.+]] = sext i32 [[VAL]] to i64
// CHECK: [[UB_VAL:%.+]] = load i64, i64* [[UB]],
// CHECK: [[CMP:%.+]] = icmp ule i64 [[VAL_I64]], [[UB_VAL]]
// CHECK: br i1 [[CMP]], label %{{.+}}, label %{{.+}}
// CHECK: load i32, i32* %
// CHECK: store i32 %
// CHECK: load i32, i32* %
// CHECK: add nsw i32 %{{.+}}, 1
// CHECK: store i32 %{{.+}}, i32* %
// CHECK: br label %
// CHECK: ret i32 0

#endif