; libclc: 32-bit integer atomic builtins implemented directly in LLVM IR.
; Atomically add %value to the i32 at %ptr in addrspace(1) (OpenCL global,
; per libclc convention); seq_cst ordering. Returns the value held before
; the update. volatile keeps the operation from being optimized away.
define i32 @__clc_atomic_add_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomically add %value to the i32 at %ptr in addrspace(3) (OpenCL local,
; per libclc convention); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_add_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile add i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic fetch-and-AND of %value with the i32 at %ptr in addrspace(1)
; (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_and_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic fetch-and-AND of %value with the i32 at %ptr in addrspace(3)
; (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_and_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile and i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic signed maximum: store max(%value, *%ptr) to the i32 at %ptr in
; addrspace(1) (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_max_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic signed maximum: store max(%value, *%ptr) to the i32 at %ptr in
; addrspace(3) (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_max_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile max i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic signed minimum: store min(%value, *%ptr) to the i32 at %ptr in
; addrspace(1) (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_min_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic signed minimum: store min(%value, *%ptr) to the i32 at %ptr in
; addrspace(3) (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_min_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile min i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic fetch-and-OR of %value with the i32 at %ptr in addrspace(1)
; (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_or_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic fetch-and-OR of %value with the i32 at %ptr in addrspace(3)
; (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_or_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile or i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic unsigned maximum: store umax(%value, *%ptr) to the i32 at %ptr in
; addrspace(1) (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_umax_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic unsigned maximum: store umax(%value, *%ptr) to the i32 at %ptr in
; addrspace(3) (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_umax_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile umax i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic unsigned minimum: store umin(%value, *%ptr) to the i32 at %ptr in
; addrspace(1) (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_umin_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic unsigned minimum: store umin(%value, *%ptr) to the i32 at %ptr in
; addrspace(3) (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_umin_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile umin i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomically subtract %value from the i32 at %ptr in addrspace(1)
; (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_sub_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomically subtract %value from the i32 at %ptr in addrspace(3)
; (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_sub_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile sub i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic fetch-and-XOR of %value with the i32 at %ptr in addrspace(1)
; (OpenCL global); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_xor_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %old
}

; Atomic fetch-and-XOR of %value with the i32 at %ptr in addrspace(3)
; (OpenCL local); seq_cst ordering. Returns the previous value.
define i32 @__clc_atomic_xor_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %old = atomicrmw volatile xor i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %old
}