// RUN: %clang_cc1 %s -emit-llvm -o - -triple=i686-apple-darwin9 | FileCheck %s
// Exercise the GCC __sync_* atomic builtins and verify the IR clang emits
// (the pre-2011 llvm.memory.barrier / llvm.atomic.* intrinsic forms).
// NOTE: this is a compile-only FileCheck test; it CASes through null
// pointers below and must never actually be executed.
int atomic(void) {
  // non-sensical test for sync functions
  int old;
  int val = 1;
  char valc = 1;
  _Bool valb = 0;
  unsigned int uval = 1;
  int cmp = 0;

  old = __sync_fetch_and_add(&val, 1);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.add.i32.p0i32(i32* %val, i32 1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_sub(&valc, 2);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.sub.i8.p0i8(i8* %valc, i8 2)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_min(&val, 3);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.min.i32.p0i32(i32* %val, i32 3)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_max(&val, 4);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.max.i32.p0i32(i32* %val, i32 4)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_umin(&uval, 5u);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.umin.i32.p0i32(i32* %uval, i32 5)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_umax(&uval, 6u);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.umax.i32.p0i32(i32* %uval, i32 6)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_lock_test_and_set(&val, 7);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.swap.i32.p0i32(i32* %val, i32 7)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_val_compare_and_swap(&val, 4, 1976);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* %val, i32 4, i32 1976)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_bool_compare_and_swap(&val, 4, 1976);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* %val, i32 4, i32 1976)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_and(&val, 0x9);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.and.i32.p0i32(i32* %val, i32 9)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_or(&val, 0xa);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.or.i32.p0i32(i32* %val, i32 10)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_xor(&val, 0xb);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.xor.i32.p0i32(i32* %val, i32 11)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_add_and_fetch(&val, 1);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.add.i32.p0i32(i32* %val, i32 1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_sub_and_fetch(&val, 2);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.sub.i32.p0i32(i32* %val, i32 2)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_and_and_fetch(&valc, 3);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.and.i8.p0i8(i8* %valc, i8 3)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_or_and_fetch(&valc, 4);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.or.i8.p0i8(i8* %valc, i8 4)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_xor_and_fetch(&valc, 5);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.xor.i8.p0i8(i8* %valc, i8 5)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)


  __sync_val_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* null, i32 0, i32 0)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  if ( __sync_val_compare_and_swap(&valb, 0, 1)) {
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // FIXME: Doesn't seem right!
  // CHECK: call i8 @llvm.atomic.cmp.swap.i8.p0i8(i8* %valb, i8 0, i8 -1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
    old = 42;
  }

  __sync_bool_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* null, i32 0, i32 0)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  __sync_lock_release(&val);
  // CHECK: volatile store i32 0, i32*

  __sync_synchronize ();
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 false)

  return old;
}
// CHECK: @release_return
// A void function may "return" the void result of __sync_lock_release;
// verify codegen handles that and still emits the release store.
void release_return(int *lock) {
  // Ensure this is actually returning void all the way through.
  return __sync_lock_release(lock);
  // CHECK: volatile store i32 0, i32*
}


// CHECK: @addrspace
// Compare-and-swap through a pointer in a non-default address space must
// lower to the address-space-qualified cmp.swap intrinsic (p256i32).
void addrspace(int __attribute__((address_space(256))) * P) {
  __sync_bool_compare_and_swap(P, 0, 1);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p256i32(i32 addrspace(256)*{{.*}}, i32 0, i32 1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)


  __sync_val_compare_and_swap(P, 0, 1);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p256i32(i32 addrspace(256)*{{.*}}, i32 0, i32 1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
}