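; 32-bit __clc_atomic_* helpers: each operation maps directly onto an LLVM
; atomicrmw (or cmpxchg) instruction with seq_cst ordering. The _addr1
; variants take addrspace(1) pointers and the _addr3 variants take
; addrspace(3) pointers, which correspond to the OpenCL global and local
; address spaces on typical GPU targets.
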
define i32 @__clc_atomic_add_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_add_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile add i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_and_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_and_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile and i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

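; Compare-and-exchange: cmpxchg yields an { i32, i1 } pair holding the
; original value and a success flag; only the original value is extracted
; and returned, matching OpenCL's atomic_cmpxchg, which returns the old
; contents of *ptr.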
define i32 @__clc_atomic_cmpxchg_addr1(i32 addrspace(1)* nocapture %ptr, i32 %compare, i32 %value) nounwind alwaysinline {
entry:
  %0 = cmpxchg volatile i32 addrspace(1)* %ptr, i32 %compare, i32 %value seq_cst seq_cst
  %1 = extractvalue { i32, i1 } %0, 0
  ret i32 %1
}

define i32 @__clc_atomic_cmpxchg_addr3(i32 addrspace(3)* nocapture %ptr, i32 %compare, i32 %value) nounwind alwaysinline {
entry:
  %0 = cmpxchg volatile i32 addrspace(3)* %ptr, i32 %compare, i32 %value seq_cst seq_cst
  %1 = extractvalue { i32, i1 } %0, 0
  ret i32 %1
}

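; atomicrmw max/min use signed comparison; the unsigned forms (umax/umin)
; follow further below.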
define i32 @__clc_atomic_max_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_max_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile max i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_min_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_min_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile min i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_or_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_or_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile or i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

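; Unsigned counterparts of max/min above, using atomicrmw umax/umin.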
define i32 @__clc_atomic_umax_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_umax_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile umax i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_umin_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_umin_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile umin i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_sub_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_sub_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile sub i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

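; Atomic exchange: stores %value and returns the previous contents of *ptr.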
define i32 @__clc_atomic_xchg_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_xchg_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile xchg i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_xor_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_xor_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile xor i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}