// RUN: %clang_cc1 %s -emit-llvm -o - -triple=i686-apple-darwin9 | FileCheck %s

// Exercise each __sync_* builtin and verify (via FileCheck) that it lowers to
// the expected LLVM atomic instruction with seq_cst ordering.
// NOTE: variable names (%val, %valc, %valb, %uval) are matched by the CHECK
// patterns below — do not rename them.
int atomic(void) {
  // non-sensical test for sync functions
  int old;
  int val = 1;
  char valc = 1;
  _Bool valb = 0;
  unsigned int uval = 1;
  int cmp = 0;
  int* ptrval;

  old = __sync_fetch_and_add(&val, 1);
  // CHECK: atomicrmw add i32* %val, i32 1 seq_cst

  old = __sync_fetch_and_sub(&valc, 2);
  // CHECK: atomicrmw sub i8* %valc, i8 2 seq_cst

  old = __sync_fetch_and_min(&val, 3);
  // CHECK: atomicrmw min i32* %val, i32 3 seq_cst

  old = __sync_fetch_and_max(&val, 4);
  // CHECK: atomicrmw max i32* %val, i32 4 seq_cst

  old = __sync_fetch_and_umin(&uval, 5u);
  // CHECK: atomicrmw umin i32* %uval, i32 5 seq_cst

  old = __sync_fetch_and_umax(&uval, 6u);
  // CHECK: atomicrmw umax i32* %uval, i32 6 seq_cst

  old = __sync_lock_test_and_set(&val, 7);
  // CHECK: atomicrmw xchg i32* %val, i32 7 seq_cst

  old = __sync_swap(&val, 8);
  // CHECK: atomicrmw xchg i32* %val, i32 8 seq_cst

  old = __sync_val_compare_and_swap(&val, 4, 1976);
  // CHECK: [[PAIR:%[a-z0-9_.]+]] = cmpxchg i32* %val, i32 4, i32 1976 seq_cst
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 0

  old = __sync_bool_compare_and_swap(&val, 4, 1976);
  // CHECK: [[PAIR:%[a-z0-9_.]+]] = cmpxchg i32* %val, i32 4, i32 1976 seq_cst
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 1

  old = __sync_fetch_and_and(&val, 0x9);
  // CHECK: atomicrmw and i32* %val, i32 9 seq_cst

  old = __sync_fetch_and_or(&val, 0xa);
  // CHECK: atomicrmw or i32* %val, i32 10 seq_cst

  old = __sync_fetch_and_xor(&val, 0xb);
  // CHECK: atomicrmw xor i32* %val, i32 11 seq_cst

  old = __sync_fetch_and_nand(&val, 0xc);
  // CHECK: atomicrmw nand i32* %val, i32 12 seq_cst

  old = __sync_add_and_fetch(&val, 1);
  // CHECK: atomicrmw add i32* %val, i32 1 seq_cst

  old = __sync_sub_and_fetch(&val, 2);
  // CHECK: atomicrmw sub i32* %val, i32 2 seq_cst

  old = __sync_and_and_fetch(&valc, 3);
  // CHECK: atomicrmw and i8* %valc, i8 3 seq_cst

  old = __sync_or_and_fetch(&valc, 4);
  // CHECK: atomicrmw or i8* %valc, i8 4 seq_cst

  old = __sync_xor_and_fetch(&valc, 5);
  // CHECK: atomicrmw xor i8* %valc, i8 5 seq_cst

  old = __sync_nand_and_fetch(&valc, 6);
  // CHECK: atomicrmw nand i8* %valc, i8 6 seq_cst

  // Null-pointer operands: only codegen is checked; this function is never run.
  __sync_val_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: [[PAIR:%[a-z0-9_.]+]] = cmpxchg i32* null, i32 0, i32 0 seq_cst
  // CHECK: extractvalue { i32, i1 } [[PAIR]], 0

  if ( __sync_val_compare_and_swap(&valb, 0, 1)) {
  // CHECK: [[PAIR:%[a-z0-9_.]+]] = cmpxchg i8* %valb, i8 0, i8 1 seq_cst
  // CHECK: [[VAL:%[a-z0-9_.]+]] = extractvalue { i8, i1 } [[PAIR]], 0
  // CHECK: trunc i8 [[VAL]] to i1
    old = 42;
  }

  __sync_bool_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: cmpxchg i32* null, i32 0, i32 0 seq_cst

  __sync_lock_release(&val);
  // CHECK: store atomic i32 0, {{.*}} release, align 4

  __sync_lock_release(&ptrval);
  // CHECK: store atomic i32 0, {{.*}} release, align 4

  __sync_synchronize ();
  // CHECK: fence seq_cst

  return old;
}
Chandler Carruth8d13d222010-07-18 20:54:12 +0000100
// CHECK: @release_return
// Returning a void expression must still produce a plain void return.
void release_return(int *lock) {
  // Ensure this is actually returning void all the way through.
  return __sync_lock_release(lock);
  // CHECK: store atomic {{.*}} release, align 4
}
Chris Lattnerf2b95272010-09-21 23:24:52 +0000107
108
// rdar://8461279 - Atomics with address spaces.
// Pointer operands in a non-default address space must keep their
// addrspace(256) qualifier on the generated atomic instructions.
// CHECK: @addrspace
void addrspace(int __attribute__((address_space(256))) * P) {
  __sync_bool_compare_and_swap(P, 0, 1);
  // CHECK: cmpxchg i32 addrspace(256)*{{.*}}, i32 0, i32 1 seq_cst

  __sync_val_compare_and_swap(P, 0, 1);
  // CHECK: cmpxchg i32 addrspace(256)*{{.*}}, i32 0, i32 1 seq_cst

  __sync_xor_and_fetch(P, 123);
  // CHECK: atomicrmw xor i32 addrspace(256)*{{.*}}, i32 123 seq_cst
}