; RUN: llc < %s -march=nvptx -mcpu=sm_20 | FileCheck %s
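; Check that each atomicrmw operation is lowered to the corresponding
; PTX atom.* instruction.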


; CHECK: atom0
define i32 @atom0(i32* %addr, i32 %val) {
; CHECK: atom.add.u32
  %ret = atomicrmw add i32* %addr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom1
define i64 @atom1(i64* %addr, i64 %val) {
; CHECK: atom.add.u64
  %ret = atomicrmw add i64* %addr, i64 %val seq_cst
  ret i64 %ret
}

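; PTX has no atom.sub, so atomicrmw sub is lowered to a negate of the
; operand followed by atom.add.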
; CHECK: atom2
define i32 @atom2(i32* %subr, i32 %val) {
; CHECK: neg.s32
; CHECK: atom.add.u32
  %ret = atomicrmw sub i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom3
define i64 @atom3(i64* %subr, i64 %val) {
; CHECK: neg.s64
; CHECK: atom.add.u64
  %ret = atomicrmw sub i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

; CHECK: atom4
define i32 @atom4(i32* %subr, i32 %val) {
; CHECK: atom.and.b32
  %ret = atomicrmw and i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom5
define i64 @atom5(i64* %subr, i64 %val) {
; CHECK: atom.and.b64
  %ret = atomicrmw and i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

;; NAND not yet supported
;define i32 @atom6(i32* %subr, i32 %val) {
;  %ret = atomicrmw nand i32* %subr, i32 %val seq_cst
;  ret i32 %ret
;}

;define i64 @atom7(i64* %subr, i64 %val) {
;  %ret = atomicrmw nand i64* %subr, i64 %val seq_cst
;  ret i64 %ret
;}

; CHECK: atom8
define i32 @atom8(i32* %subr, i32 %val) {
; CHECK: atom.or.b32
  %ret = atomicrmw or i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom9
define i64 @atom9(i64* %subr, i64 %val) {
; CHECK: atom.or.b64
  %ret = atomicrmw or i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

; CHECK: atom10
define i32 @atom10(i32* %subr, i32 %val) {
; CHECK: atom.xor.b32
  %ret = atomicrmw xor i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom11
define i64 @atom11(i64* %subr, i64 %val) {
; CHECK: atom.xor.b64
  %ret = atomicrmw xor i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

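; Signed max/min select the atom.max.s*/atom.min.s* forms; the unsigned
; umax/umin variants below select atom.max.u*/atom.min.u*.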
; CHECK: atom12
define i32 @atom12(i32* %subr, i32 %val) {
; CHECK: atom.max.s32
  %ret = atomicrmw max i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom13
define i64 @atom13(i64* %subr, i64 %val) {
; CHECK: atom.max.s64
  %ret = atomicrmw max i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

; CHECK: atom14
define i32 @atom14(i32* %subr, i32 %val) {
; CHECK: atom.min.s32
  %ret = atomicrmw min i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom15
define i64 @atom15(i64* %subr, i64 %val) {
; CHECK: atom.min.s64
  %ret = atomicrmw min i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

; CHECK: atom16
define i32 @atom16(i32* %subr, i32 %val) {
; CHECK: atom.max.u32
  %ret = atomicrmw umax i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom17
define i64 @atom17(i64* %subr, i64 %val) {
; CHECK: atom.max.u64
  %ret = atomicrmw umax i64* %subr, i64 %val seq_cst
  ret i64 %ret
}

; CHECK: atom18
define i32 @atom18(i32* %subr, i32 %val) {
; CHECK: atom.min.u32
  %ret = atomicrmw umin i32* %subr, i32 %val seq_cst
  ret i32 %ret
}

; CHECK: atom19
define i64 @atom19(i64* %subr, i64 %val) {
; CHECK: atom.min.u64
  %ret = atomicrmw umin i64* %subr, i64 %val seq_cst
  ret i64 %ret
}