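; 32-bit integer atomic helpers built on LLVM's atomicrmw instruction.
; For each operation there are two variants: *_addr1 takes an addrspace(1)
; pointer and *_addr3 takes an addrspace(3) pointer (conventionally the
; OpenCL global and local address spaces on the targets these helpers are
; built for). Every operation is volatile, sequentially consistent, and
; returns the value held at %ptr before the update.

; 32-bit atomic add: addrspace(1) and addrspace(3) variants.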
define i32 @__clc_atomic_add_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_add_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile add i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}
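
; 32-bit atomic and: addrspace(1) and addrspace(3) variants.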
define i32 @__clc_atomic_and_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_and_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile and i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}
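
; 32-bit atomic signed max: addrspace(1) and addrspace(3) variants.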
define i32 @__clc_atomic_max_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_max_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile max i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}
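
; 32-bit atomic or: addrspace(1) and addrspace(3) variants.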
define i32 @__clc_atomic_or_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_or_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile or i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}
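
; 32-bit atomic unsigned max: addrspace(1) and addrspace(3) variants.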
define i32 @__clc_atomic_umax_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_umax_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile umax i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}
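
; 32-bit atomic subtract: addrspace(1) and addrspace(3) variants.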
define i32 @__clc_atomic_sub_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_sub_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile sub i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}
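
; 32-bit atomic xor: addrspace(1) and addrspace(3) variants.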
define i32 @__clc_atomic_xor_addr1(i32 addrspace(1)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %value seq_cst
  ret i32 %0
}

define i32 @__clc_atomic_xor_addr3(i32 addrspace(3)* nocapture %ptr, i32 %value) nounwind alwaysinline {
entry:
  %0 = atomicrmw volatile xor i32 addrspace(3)* %ptr, i32 %value seq_cst
  ret i32 %0
}