// RUN: %clang_cc1 -verify -triple x86_64-apple-darwin10 -fopenmp=libiomp5 -x c -emit-llvm %s -o - | FileCheck %s
// RUN: %clang_cc1 -fopenmp=libiomp5 -x c -triple x86_64-apple-darwin10 -emit-pch -o %t %s
// RUN: %clang_cc1 -fopenmp=libiomp5 -x c -triple x86_64-apple-darwin10 -include-pch %t -verify %s -emit-llvm -o - | FileCheck %s
// expected-no-diagnostics

#ifndef HEADER
#define HEADER
| 8 | |
| 9 | _Bool bv, bx; |
| 10 | char cv, cx; |
| 11 | unsigned char ucv, ucx; |
| 12 | short sv, sx; |
| 13 | unsigned short usv, usx; |
| 14 | int iv, ix; |
| 15 | unsigned int uiv, uix; |
| 16 | long lv, lx; |
| 17 | unsigned long ulv, ulx; |
| 18 | long long llv, llx; |
| 19 | unsigned long long ullv, ullx; |
| 20 | float fv, fx; |
| 21 | double dv, dx; |
| 22 | long double ldv, ldx; |
| 23 | _Complex int civ, cix; |
| 24 | _Complex float cfv, cfx; |
| 25 | _Complex double cdv, cdx; |
| 26 | |
| 27 | typedef int int4 __attribute__((__vector_size__(16))); |
| 28 | int4 int4x; |
| 29 | |
| 30 | struct BitFields { |
| 31 | int : 32; |
| 32 | int a : 31; |
| 33 | } bfx; |
| 34 | |
| 35 | struct BitFields_packed { |
| 36 | int : 32; |
| 37 | int a : 31; |
| 38 | } __attribute__ ((__packed__)) bfx_packed; |
| 39 | |
| 40 | struct BitFields2 { |
| 41 | int : 31; |
| 42 | int a : 1; |
| 43 | } bfx2; |
| 44 | |
| 45 | struct BitFields2_packed { |
| 46 | int : 31; |
| 47 | int a : 1; |
| 48 | } __attribute__ ((__packed__)) bfx2_packed; |
| 49 | |
| 50 | struct BitFields3 { |
| 51 | int : 11; |
| 52 | int a : 14; |
| 53 | } bfx3; |
| 54 | |
| 55 | struct BitFields3_packed { |
| 56 | int : 11; |
| 57 | int a : 14; |
| 58 | } __attribute__ ((__packed__)) bfx3_packed; |
| 59 | |
| 60 | struct BitFields4 { |
| 61 | short : 16; |
| 62 | int a: 1; |
| 63 | long b : 7; |
| 64 | } bfx4; |
| 65 | |
| 66 | struct BitFields4_packed { |
| 67 | short : 16; |
| 68 | int a: 1; |
| 69 | long b : 7; |
| 70 | } __attribute__ ((__packed__)) bfx4_packed; |
| 71 | |
| 72 | typedef float float2 __attribute__((ext_vector_type(2))); |
| 73 | float2 float2x; |
| 74 | |
| 75 | register int rix __asm__("0"); |
| 76 | |
| 77 | int main() { |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 78 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 79 | // CHECK: store i8 |
| 80 | #pragma omp atomic read |
| 81 | bv = bx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 82 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 83 | // CHECK: store i8 |
| 84 | #pragma omp atomic read |
| 85 | cv = cx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 86 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 87 | // CHECK: store i8 |
| 88 | #pragma omp atomic read |
| 89 | ucv = ucx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 90 | // CHECK: load atomic i16, i16* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 91 | // CHECK: store i16 |
| 92 | #pragma omp atomic read |
| 93 | sv = sx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 94 | // CHECK: load atomic i16, i16* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 95 | // CHECK: store i16 |
| 96 | #pragma omp atomic read |
| 97 | usv = usx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 98 | // CHECK: load atomic i32, i32* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 99 | // CHECK: store i32 |
| 100 | #pragma omp atomic read |
| 101 | iv = ix; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 102 | // CHECK: load atomic i32, i32* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 103 | // CHECK: store i32 |
| 104 | #pragma omp atomic read |
| 105 | uiv = uix; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 106 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 107 | // CHECK: store i64 |
| 108 | #pragma omp atomic read |
| 109 | lv = lx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 110 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 111 | // CHECK: store i64 |
| 112 | #pragma omp atomic read |
| 113 | ulv = ulx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 114 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 115 | // CHECK: store i64 |
| 116 | #pragma omp atomic read |
| 117 | llv = llx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 118 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 119 | // CHECK: store i64 |
| 120 | #pragma omp atomic read |
| 121 | ullv = ullx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 122 | // CHECK: load atomic i32, i32* bitcast (float* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 123 | // CHECK: bitcast i32 {{.*}} to float |
| 124 | // CHECK: store float |
| 125 | #pragma omp atomic read |
| 126 | fv = fx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 127 | // CHECK: load atomic i64, i64* bitcast (double* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 128 | // CHECK: bitcast i64 {{.*}} to double |
| 129 | // CHECK: store double |
| 130 | #pragma omp atomic read |
| 131 | dv = dx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 132 | // CHECK: [[LD:%.+]] = load atomic i128, i128* bitcast (x86_fp80* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 133 | // CHECK: [[BITCAST:%.+]] = bitcast x86_fp80* [[LDTEMP:%.*]] to i128* |
| 134 | // CHECK: store i128 [[LD]], i128* [[BITCAST]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 135 | // CHECK: [[LD:%.+]] = load x86_fp80, x86_fp80* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 136 | // CHECK: store x86_fp80 [[LD]] |
| 137 | #pragma omp atomic read |
| 138 | ldv = ldx; |
| 139 | // CHECK: call{{.*}} void @__atomic_load(i64 8, |
| 140 | // CHECK: store i32 |
| 141 | // CHECK: store i32 |
| 142 | #pragma omp atomic read |
| 143 | civ = cix; |
| 144 | // CHECK: call{{.*}} void @__atomic_load(i64 8, |
| 145 | // CHECK: store float |
| 146 | // CHECK: store float |
| 147 | #pragma omp atomic read |
| 148 | cfv = cfx; |
| 149 | // CHECK: call{{.*}} void @__atomic_load(i64 16, |
| 150 | // CHECK: call{{.*}} @__kmpc_flush( |
| 151 | // CHECK: store double |
| 152 | // CHECK: store double |
| 153 | #pragma omp atomic seq_cst read |
| 154 | cdv = cdx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 155 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 156 | // CHECK: store i8 |
| 157 | #pragma omp atomic read |
| 158 | bv = ulx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 159 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 160 | // CHECK: store i8 |
| 161 | #pragma omp atomic read |
| 162 | cv = bx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 163 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 164 | // CHECK: call{{.*}} @__kmpc_flush( |
| 165 | // CHECK: store i8 |
| 166 | #pragma omp atomic read, seq_cst |
| 167 | ucv = cx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 168 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 169 | // CHECK: store i16 |
| 170 | #pragma omp atomic read |
| 171 | sv = ulx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 172 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 173 | // CHECK: store i16 |
| 174 | #pragma omp atomic read |
| 175 | usv = lx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 176 | // CHECK: load atomic i32, i32* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 177 | // CHECK: call{{.*}} @__kmpc_flush( |
| 178 | // CHECK: store i32 |
| 179 | #pragma omp atomic seq_cst, read |
| 180 | iv = uix; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 181 | // CHECK: load atomic i32, i32* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 182 | // CHECK: store i32 |
| 183 | #pragma omp atomic read |
| 184 | uiv = ix; |
| 185 | // CHECK: call{{.*}} void @__atomic_load(i64 8, |
| 186 | // CHECK: store i64 |
| 187 | #pragma omp atomic read |
| 188 | lv = cix; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 189 | // CHECK: load atomic i32, i32* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 190 | // CHECK: store i64 |
| 191 | #pragma omp atomic read |
| 192 | ulv = fx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 193 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 194 | // CHECK: store i64 |
| 195 | #pragma omp atomic read |
| 196 | llv = dx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 197 | // CHECK: load atomic i128, i128* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 198 | // CHECK: store i64 |
| 199 | #pragma omp atomic read |
| 200 | ullv = ldx; |
| 201 | // CHECK: call{{.*}} void @__atomic_load(i64 8, |
| 202 | // CHECK: store float |
| 203 | #pragma omp atomic read |
| 204 | fv = cix; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 205 | // CHECK: load atomic i16, i16* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 206 | // CHECK: store double |
| 207 | #pragma omp atomic read |
| 208 | dv = sx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 209 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 210 | // CHECK: store x86_fp80 |
| 211 | #pragma omp atomic read |
| 212 | ldv = bx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 213 | // CHECK: load atomic i8, i8* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 214 | // CHECK: store i32 |
| 215 | // CHECK: store i32 |
| 216 | #pragma omp atomic read |
| 217 | civ = bx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 218 | // CHECK: load atomic i16, i16* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 219 | // CHECK: store float |
| 220 | // CHECK: store float |
| 221 | #pragma omp atomic read |
| 222 | cfv = usx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 223 | // CHECK: load atomic i64, i64* |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 224 | // CHECK: store double |
| 225 | // CHECK: store double |
| 226 | #pragma omp atomic read |
| 227 | cdv = llx; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 228 | // CHECK: [[I128VAL:%.+]] = load atomic i128, i128* bitcast (<4 x i32>* @{{.+}} to i128*) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 229 | // CHECK: [[I128PTR:%.+]] = bitcast <4 x i32>* [[LDTEMP:%.+]] to i128* |
| 230 | // CHECK: store i128 [[I128VAL]], i128* [[I128PTR]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 231 | // CHECK: [[LD:%.+]] = load <4 x i32>, <4 x i32>* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 232 | // CHECK: extractelement <4 x i32> [[LD]] |
| 233 | // CHECK: store i8 |
| 234 | #pragma omp atomic read |
| 235 | bv = int4x[0]; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 236 | // CHECK: [[LD:%.+]] = load atomic i32, i32* bitcast (i8* getelementptr (i8, i8* bitcast (%{{.+}}* @{{.+}} to i8*), i64 4) to i32*) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 237 | // CHECK: store i32 [[LD]], i32* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 238 | // CHECK: [[LD:%.+]] = load i32, i32* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 239 | // CHECK: [[SHL:%.+]] = shl i32 [[LD]], 1 |
| 240 | // CHECK: ashr i32 [[SHL]], 1 |
| 241 | // CHECK: store x86_fp80 |
| 242 | #pragma omp atomic read |
| 243 | ldv = bfx.a; |
| 244 | // CHECK: [[LDTEMP_VOID_PTR:%.+]] = bitcast i32* [[LDTEMP:%.+]] to i8* |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 245 | // CHECK: call void @__atomic_load(i64 4, i8* getelementptr (i8, i8* bitcast (%struct.BitFields_packed* @bfx_packed to i8*), i64 4), i8* [[LDTEMP_VOID_PTR]], i32 0) |
| 246 | // CHECK: [[LD:%.+]] = load i32, i32* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 247 | // CHECK: [[SHL:%.+]] = shl i32 [[LD]], 1 |
| 248 | // CHECK: ashr i32 [[SHL]], 1 |
| 249 | // CHECK: store x86_fp80 |
| 250 | #pragma omp atomic read |
| 251 | ldv = bfx_packed.a; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 252 | // CHECK: [[LD:%.+]] = load atomic i32, i32* getelementptr inbounds (%struct.BitFields2, %struct.BitFields2* @bfx2, i32 0, i32 0) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 253 | // CHECK: store i32 [[LD]], i32* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 254 | // CHECK: [[LD:%.+]] = load i32, i32* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 255 | // CHECK: ashr i32 [[LD]], 31 |
| 256 | // CHECK: store x86_fp80 |
| 257 | #pragma omp atomic read |
| 258 | ldv = bfx2.a; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 259 | // CHECK: [[LD:%.+]] = load atomic i8, i8* getelementptr (i8, i8* bitcast (%struct.BitFields2_packed* @bfx2_packed to i8*), i64 3) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 260 | // CHECK: store i8 [[LD]], i8* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 261 | // CHECK: [[LD:%.+]] = load i8, i8* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 262 | // CHECK: ashr i8 [[LD]], 7 |
| 263 | // CHECK: store x86_fp80 |
| 264 | #pragma omp atomic read |
| 265 | ldv = bfx2_packed.a; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 266 | // CHECK: [[LD:%.+]] = load atomic i32, i32* getelementptr inbounds (%struct.BitFields3, %struct.BitFields3* @bfx3, i32 0, i32 0) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 267 | // CHECK: store i32 [[LD]], i32* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 268 | // CHECK: [[LD:%.+]] = load i32, i32* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 269 | // CHECK: [[SHL:%.+]] = shl i32 [[LD]], 7 |
| 270 | // CHECK: ashr i32 [[SHL]], 18 |
| 271 | // CHECK: store x86_fp80 |
| 272 | #pragma omp atomic read |
| 273 | ldv = bfx3.a; |
| 274 | // CHECK: [[LDTEMP_VOID_PTR:%.+]] = bitcast i24* [[LDTEMP:%.+]] to i8* |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 275 | // CHECK: call void @__atomic_load(i64 3, i8* getelementptr (i8, i8* bitcast (%struct.BitFields3_packed* @bfx3_packed to i8*), i64 1), i8* [[LDTEMP_VOID_PTR]], i32 0) |
| 276 | // CHECK: [[LD:%.+]] = load i24, i24* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 277 | // CHECK: [[SHL:%.+]] = shl i24 [[LD]], 7 |
| 278 | // CHECK: [[ASHR:%.+]] = ashr i24 [[SHL]], 10 |
| 279 | // CHECK: sext i24 [[ASHR]] to i32 |
| 280 | // CHECK: store x86_fp80 |
| 281 | #pragma omp atomic read |
| 282 | ldv = bfx3_packed.a; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 283 | // CHECK: [[LD:%.+]] = load atomic i64, i64* bitcast (%struct.BitFields4* @bfx4 to i64*) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 284 | // CHECK: store i64 [[LD]], i64* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 285 | // CHECK: [[LD:%.+]] = load i64, i64* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 286 | // CHECK: [[SHL:%.+]] = shl i64 [[LD]], 47 |
| 287 | // CHECK: [[ASHR:%.+]] = ashr i64 [[SHL]], 63 |
| 288 | // CHECK: trunc i64 [[ASHR]] to i32 |
| 289 | // CHECK: store x86_fp80 |
| 290 | #pragma omp atomic read |
| 291 | ldv = bfx4.a; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 292 | // CHECK: [[LD:%.+]] = load atomic i8, i8* getelementptr inbounds (%struct.BitFields4_packed, %struct.BitFields4_packed* @bfx4_packed, i32 0, i32 0, i64 2) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 293 | // CHECK: store i8 [[LD]], i8* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 294 | // CHECK: [[LD:%.+]] = load i8, i8* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 295 | // CHECK: [[SHL:%.+]] = shl i8 [[LD]], 7 |
| 296 | // CHECK: [[ASHR:%.+]] = ashr i8 [[SHL]], 7 |
| 297 | // CHECK: sext i8 [[ASHR]] to i32 |
| 298 | // CHECK: store x86_fp80 |
| 299 | #pragma omp atomic read |
| 300 | ldv = bfx4_packed.a; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 301 | // CHECK: [[LD:%.+]] = load atomic i64, i64* bitcast (%struct.BitFields4* @bfx4 to i64*) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 302 | // CHECK: store i64 [[LD]], i64* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 303 | // CHECK: [[LD:%.+]] = load i64, i64* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 304 | // CHECK: [[SHL:%.+]] = shl i64 [[LD]], 40 |
| 305 | // CHECK: [[ASHR:%.+]] = ashr i64 [[SHL]], 57 |
| 306 | // CHECK: store x86_fp80 |
| 307 | #pragma omp atomic read |
| 308 | ldv = bfx4.b; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 309 | // CHECK: [[LD:%.+]] = load atomic i8, i8* getelementptr inbounds (%struct.BitFields4_packed, %struct.BitFields4_packed* @bfx4_packed, i32 0, i32 0, i64 2) monotonic |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 310 | // CHECK: store i8 [[LD]], i8* [[LDTEMP:%.+]] |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 311 | // CHECK: [[LD:%.+]] = load i8, i8* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 312 | // CHECK: [[ASHR:%.+]] = ashr i8 [[LD]], 1 |
| 313 | // CHECK: sext i8 [[ASHR]] to i64 |
| 314 | // CHECK: store x86_fp80 |
| 315 | #pragma omp atomic read |
| 316 | ldv = bfx4_packed.b; |
Pirama Arumuga Nainar | 3ea9e33 | 2015-04-08 08:57:32 -0700 | [diff] [blame] | 317 | // CHECK: [[LD:%.+]] = load atomic i64, i64* bitcast (<2 x float>* @{{.+}} to i64*) monotonic |
| 318 | // CHECK: [[BITCAST:%.+]] = bitcast <2 x float>* [[LDTEMP:%.+]] to i64* |
| 319 | // CHECK: store i64 [[LD]], i64* [[BITCAST]] |
| 320 | // CHECK: [[LD:%.+]] = load <2 x float>, <2 x float>* [[LDTEMP]] |
Stephen Hines | 0e2c34f | 2015-03-23 12:09:02 -0700 | [diff] [blame] | 321 | // CHECK: extractelement <2 x float> [[LD]] |
| 322 | // CHECK: store i64 |
| 323 | #pragma omp atomic read |
| 324 | ulv = float2x.x; |
| 325 | // CHECK: call{{.*}} i{{[0-9]+}} @llvm.read_register |
| 326 | // CHECK: call{{.*}} @__kmpc_flush( |
| 327 | // CHECK: store double |
| 328 | #pragma omp atomic read seq_cst |
| 329 | dv = rix; |
| 330 | return 0; |
| 331 | } |
| 332 | |
| 333 | #endif |