// RUN: %clang_cc1 %s -emit-llvm -o - -triple=i686-apple-darwin9 | FileCheck %s

int atomic(void) {
  // non-sensical test for sync functions
  int old;
  int val = 1;
  char valc = 1;
  _Bool valb = 0;
  unsigned int uval = 1;
  int cmp = 0;

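  // Each __sync_fetch_and_<op> builtin returns the value held before the
  // operation and lowers to a seq_cst atomicrmw.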
  old = __sync_fetch_and_add(&val, 1);
  // CHECK: atomicrmw add i32* %val, i32 1 seq_cst

  old = __sync_fetch_and_sub(&valc, 2);
  // CHECK: atomicrmw sub i8* %valc, i8 2 seq_cst

  old = __sync_fetch_and_min(&val, 3);
  // CHECK: atomicrmw min i32* %val, i32 3 seq_cst

  old = __sync_fetch_and_max(&val, 4);
  // CHECK: atomicrmw max i32* %val, i32 4 seq_cst

  old = __sync_fetch_and_umin(&uval, 5u);
  // CHECK: atomicrmw umin i32* %uval, i32 5 seq_cst

  old = __sync_fetch_and_umax(&uval, 6u);
  // CHECK: atomicrmw umax i32* %uval, i32 6 seq_cst

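  // Despite its name, __sync_lock_test_and_set is an atomic exchange; the
  // Clang-specific __sync_swap builtin lowers to the same atomicrmw xchg.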
  old = __sync_lock_test_and_set(&val, 7);
  // CHECK: atomicrmw xchg i32* %val, i32 7 seq_cst

  old = __sync_swap(&val, 8);
  // CHECK: atomicrmw xchg i32* %val, i32 8 seq_cst

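  // __sync_val_compare_and_swap returns the old value, while
  // __sync_bool_compare_and_swap returns whether the exchange happened;
  // both lower to the same cmpxchg.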
  old = __sync_val_compare_and_swap(&val, 4, 1976);
  // CHECK: cmpxchg i32* %val, i32 4, i32 1976 seq_cst

  old = __sync_bool_compare_and_swap(&val, 4, 1976);
  // CHECK: cmpxchg i32* %val, i32 4, i32 1976 seq_cst

  old = __sync_fetch_and_and(&val, 0x9);
  // CHECK: atomicrmw and i32* %val, i32 9 seq_cst

  old = __sync_fetch_and_or(&val, 0xa);
  // CHECK: atomicrmw or i32* %val, i32 10 seq_cst

  old = __sync_fetch_and_xor(&val, 0xb);
  // CHECK: atomicrmw xor i32* %val, i32 11 seq_cst

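  // The <op>_and_fetch forms return the updated value, but the atomicrmw they
  // emit is identical to the fetch_and_<op> forms; only the result computed
  // afterwards differs.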
  old = __sync_add_and_fetch(&val, 1);
  // CHECK: atomicrmw add i32* %val, i32 1 seq_cst

  old = __sync_sub_and_fetch(&val, 2);
  // CHECK: atomicrmw sub i32* %val, i32 2 seq_cst

  old = __sync_and_and_fetch(&valc, 3);
  // CHECK: atomicrmw and i8* %valc, i8 3 seq_cst

  old = __sync_or_and_fetch(&valc, 4);
  // CHECK: atomicrmw or i8* %valc, i8 4 seq_cst

  old = __sync_xor_and_fetch(&valc, 5);
  // CHECK: atomicrmw xor i8* %valc, i8 5 seq_cst

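  // On this 32-bit target, a compare-and-swap on a void* pointer is expected
  // to come out as an i32 cmpxchg on a null pointer operand.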
  __sync_val_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: cmpxchg i32* null, i32 0, i32 0 seq_cst

  if (__sync_val_compare_and_swap(&valb, 0, 1)) {
    // CHECK: cmpxchg i8* %valb, i8 0, i8 1 seq_cst
    old = 42;
  }

  __sync_bool_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: cmpxchg i32* null, i32 0, i32 0 seq_cst

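  // __sync_lock_release stores 0 with release ordering; it lowers to an
  // atomic store rather than an atomicrmw.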
  __sync_lock_release(&val);
  // CHECK: store atomic {{.*}} release, align 4

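  // __sync_synchronize is a full memory barrier and lowers to a seq_cst fence.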
  __sync_synchronize();
  // CHECK: fence seq_cst

  return old;
}

// CHECK: @release_return
void release_return(int *lock) {
  // Ensure this is actually returning void all the way through.
  return __sync_lock_release(lock);
  // CHECK: store atomic {{.*}} release, align 4
}

// rdar://8461279 - Atomics with address spaces.
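// The non-default address space must be preserved on the pointer operand of
// the generated cmpxchg and atomicrmw instructions.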
// CHECK: @addrspace
void addrspace(int __attribute__((address_space(256))) *P) {
  __sync_bool_compare_and_swap(P, 0, 1);
  // CHECK: cmpxchg i32 addrspace(256)*{{.*}}, i32 0, i32 1 seq_cst

  __sync_val_compare_and_swap(P, 0, 1);
  // CHECK: cmpxchg i32 addrspace(256)*{{.*}}, i32 0, i32 1 seq_cst

  __sync_xor_and_fetch(P, 123);
  // CHECK: atomicrmw xor i32 addrspace(256)*{{.*}}, i32 123 seq_cst
}