// RUN: %clang_cc1 %s -emit-llvm -o - -triple=i686-apple-darwin9 | FileCheck %s

// Exercises the legacy GCC __sync_* atomic builtins and verifies the LLVM IR
// each one lowers to (atomicrmw, cmpxchg, fence, atomic store). The local
// variable names are significant: the CHECK patterns match the SSA names
// (%val, %valc, %uval, %valb) that IRGen derives from them.
int atomic(void) {
  // non-sensical test for sync functions
  int old;
  int val = 1;
  char valc = 1;           // i8 operand width
  _Bool valb = 0;          // i1 source type, widened to i8 for the cmpxchg
  unsigned int uval = 1;   // exercises the unsigned min/max variants
  int cmp = 0;
  int* ptrval;

  old = __sync_fetch_and_add(&val, 1);
  // CHECK: atomicrmw add i32* %val, i32 1 seq_cst

  old = __sync_fetch_and_sub(&valc, 2);
  // CHECK: atomicrmw sub i8* %valc, i8 2 seq_cst

  old = __sync_fetch_and_min(&val, 3);
  // CHECK: atomicrmw min i32* %val, i32 3 seq_cst

  old = __sync_fetch_and_max(&val, 4);
  // CHECK: atomicrmw max i32* %val, i32 4 seq_cst

  old = __sync_fetch_and_umin(&uval, 5u);
  // CHECK: atomicrmw umin i32* %uval, i32 5 seq_cst

  old = __sync_fetch_and_umax(&uval, 6u);
  // CHECK: atomicrmw umax i32* %uval, i32 6 seq_cst

  old = __sync_lock_test_and_set(&val, 7);
  // CHECK: atomicrmw xchg i32* %val, i32 7 seq_cst

  old = __sync_swap(&val, 8);
  // CHECK: atomicrmw xchg i32* %val, i32 8 seq_cst

  old = __sync_val_compare_and_swap(&val, 4, 1976);
  // CHECK: cmpxchg i32* %val, i32 4, i32 1976 seq_cst

  old = __sync_bool_compare_and_swap(&val, 4, 1976);
  // CHECK: cmpxchg i32* %val, i32 4, i32 1976 seq_cst

  old = __sync_fetch_and_and(&val, 0x9);
  // CHECK: atomicrmw and i32* %val, i32 9 seq_cst

  old = __sync_fetch_and_or(&val, 0xa);
  // CHECK: atomicrmw or i32* %val, i32 10 seq_cst

  old = __sync_fetch_and_xor(&val, 0xb);
  // CHECK: atomicrmw xor i32* %val, i32 11 seq_cst

  old = __sync_add_and_fetch(&val, 1);
  // CHECK: atomicrmw add i32* %val, i32 1 seq_cst

  old = __sync_sub_and_fetch(&val, 2);
  // CHECK: atomicrmw sub i32* %val, i32 2 seq_cst

  old = __sync_and_and_fetch(&valc, 3);
  // CHECK: atomicrmw and i8* %valc, i8 3 seq_cst

  old = __sync_or_and_fetch(&valc, 4);
  // CHECK: atomicrmw or i8* %valc, i8 4 seq_cst

  old = __sync_xor_and_fetch(&valc, 5);
  // CHECK: atomicrmw xor i8* %valc, i8 5 seq_cst

  // Null/void* operands still lower to a cmpxchg on the pointer-sized integer.
  __sync_val_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: cmpxchg i32* null, i32 0, i32 0 seq_cst

  if ( __sync_val_compare_and_swap(&valb, 0, 1)) {
  // CHECK: cmpxchg i8* %valb, i8 0, i8 1 seq_cst
    old = 42;
  }

  __sync_bool_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: cmpxchg i32* null, i32 0, i32 0 seq_cst

  __sync_lock_release(&val);
  // CHECK: store atomic i32 0, {{.*}} release, align 4

  __sync_lock_release(&ptrval);
  // CHECK: store atomic i32 0, {{.*}} release, align 4

  __sync_synchronize ();
  // CHECK: fence seq_cst

  return old;
}
Chandler Carruth | 8d13d22 | 2010-07-18 20:54:12 +0000 | [diff] [blame] | 89 | |
// CHECK: @release_return
void release_return(int *lock) {
  // Ensure this is actually returning void all the way through.
  return __sync_lock_release(lock);
  // CHECK: store atomic {{.*}} release, align 4
}
Chris Lattner | f2b9527 | 2010-09-21 23:24:52 +0000 | [diff] [blame] | 96 | |
| 97 | |
// rdar://8461279 - Atomics with address spaces.
// The addrspace(256) qualifier must survive into the atomicrmw/cmpxchg
// pointer operand types.
// CHECK: @addrspace
void addrspace(int __attribute__((address_space(256))) * P) {
  __sync_bool_compare_and_swap(P, 0, 1);
  // CHECK: cmpxchg i32 addrspace(256)*{{.*}}, i32 0, i32 1 seq_cst

  __sync_val_compare_and_swap(P, 0, 1);
  // CHECK: cmpxchg i32 addrspace(256)*{{.*}}, i32 0, i32 1 seq_cst

  __sync_xor_and_fetch(P, 123);
  // CHECK: atomicrmw xor i32 addrspace(256)*{{.*}}, i32 123 seq_cst
}