// RUN: %clang_cc1 %s -emit-llvm -o - -triple=i686-apple-darwin9 | FileCheck %s

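// Checks that the __sync_* builtins are lowered to llvm.atomic.* intrinsics,
// each bracketed by a pair of full llvm.memory.barrier calls.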
int atomic(void) {
  // nonsensical test for the sync builtins
  int old;
  int val = 1;
  char valc = 1;
  _Bool valb = 0;
  unsigned int uval = 1;
  int cmp = 0;

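  // The __sync_fetch_and_<op> forms return the value held before the
  // operation; each one should lower to a single atomic intrinsic with a
  // barrier on either side.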
  old = __sync_fetch_and_add(&val, 1);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.add.i32.p0i32(i32* %val, i32 1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_sub(&valc, 2);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.sub.i8.p0i8(i8* %valc, i8 2)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_min(&val, 3);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.min.i32.p0i32(i32* %val, i32 3)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_max(&val, 4);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.max.i32.p0i32(i32* %val, i32 4)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_umin(&uval, 5u);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.umin.i32.p0i32(i32* %uval, i32 5)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_umax(&uval, 6u);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.umax.i32.p0i32(i32* %uval, i32 6)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_lock_test_and_set(&val, 7);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.swap.i32.p0i32(i32* %val, i32 7)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_val_compare_and_swap(&val, 4, 1976);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* %val, i32 4, i32 1976)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_bool_compare_and_swap(&val, 4, 1976);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* %val, i32 4, i32 1976)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_and(&val, 0x9);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.and.i32.p0i32(i32* %val, i32 9)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_or(&val, 0xa);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.or.i32.p0i32(i32* %val, i32 10)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_fetch_and_xor(&val, 0xb);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.xor.i32.p0i32(i32* %val, i32 11)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

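  // The __sync_<op>_and_fetch forms lower to the same intrinsics; the
  // builtin's result (the updated value) is derived from the intrinsic's
  // return value afterwards and is not checked here.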
  old = __sync_add_and_fetch(&val, 1);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.add.i32.p0i32(i32* %val, i32 1)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_sub_and_fetch(&val, 2);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.load.sub.i32.p0i32(i32* %val, i32 2)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_and_and_fetch(&valc, 3);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.and.i8.p0i8(i8* %valc, i8 3)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_or_and_fetch(&valc, 4);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.or.i8.p0i8(i8* %valc, i8 4)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  old = __sync_xor_and_fetch(&valc, 5);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i8 @llvm.atomic.load.xor.i8.p0i8(i8* %valc, i8 5)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

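  // Pointer operands: on this 32-bit (i686) target the pointer-typed
  // compare-and-swap lowers to the i32 intrinsic.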
  __sync_val_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* null, i32 0, i32 0)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

  if (__sync_val_compare_and_swap(&valb, 0, 1)) {
    // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
    // FIXME: Doesn't seem right!
    // CHECK: call i8 @llvm.atomic.cmp.swap.i8.p0i8(i8* %valb, i8 0, i8 -1)
    // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
    old = 42;
  }

  __sync_bool_compare_and_swap((void **)0, (void *)0, (void *)0);
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)
  // CHECK: call i32 @llvm.atomic.cmp.swap.i32.p0i32(i32* null, i32 0, i32 0)
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 true)

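  // __sync_lock_release lowers to a plain volatile store of zero, and
  // __sync_synchronize to a lone memory barrier.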
  __sync_lock_release(&val);
  // CHECK: volatile store i32 0, i32*

  __sync_synchronize();
  // CHECK: call void @llvm.memory.barrier(i1 true, i1 true, i1 true, i1 true, i1 false)

  return old;
}

void release_return(int *lock) {
  // Ensure this is actually returning void all the way through.
  return __sync_lock_release(lock);
  // CHECK: volatile store i32 0, i32*
}