; RUN: llc < %s -march=x86-64 | FileCheck %s

; rdar://7103704

; Atomic subtract of a variable amount; the result (%0) is unused, so the
; CHECK lines pin that codegen selects a plain "subl" form.
define void @sub1(i32* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: sub1:
; CHECK: subl
  %0 = tail call i32 @llvm.atomic.load.sub.i32.p0i32(i32* %p, i32 %v) ; <i32> [#uses=0]
  ret void
}
12
; Atomic add of constant 1 to an i64, result unused: expect "incq".
define void @inc4(i64* nocapture %p) nounwind ssp {
entry:
; CHECK: inc4:
; CHECK: incq
  %0 = tail call i64 @llvm.atomic.load.add.i64.p0i64(i64* %p, i64 1) ; <i64> [#uses=0]
  ret void
}

; Legacy (pre-"atomicrmw") atomic add intrinsic, i64 variant.
declare i64 @llvm.atomic.load.add.i64.p0i64(i64* nocapture, i64) nounwind
22
; Atomic add of constant 2 to an i64, result unused: expect "addq $2".
define void @add8(i64* nocapture %p) nounwind ssp {
entry:
; CHECK: add8:
; CHECK: addq $2
  %0 = tail call i64 @llvm.atomic.load.add.i64.p0i64(i64* %p, i64 2) ; <i64> [#uses=0]
  ret void
}
30
; Atomic add of a sign-extended i32 amount to an i64, result unused:
; expect a register-form "addq".
define void @add4(i64* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: add4:
; CHECK: addq
  %0 = sext i32 %v to i64 ; <i64> [#uses=1]
  %1 = tail call i64 @llvm.atomic.load.add.i64.p0i64(i64* %p, i64 %0) ; <i64> [#uses=0]
  ret void
}
39
; Atomic add of constant 1 to an i8, result unused: expect "incb".
define void @inc3(i8* nocapture %p) nounwind ssp {
entry:
; CHECK: inc3:
; CHECK: incb
  %0 = tail call i8 @llvm.atomic.load.add.i8.p0i8(i8* %p, i8 1) ; <i8> [#uses=0]
  ret void
}

; Legacy atomic add intrinsic, i8 variant.
declare i8 @llvm.atomic.load.add.i8.p0i8(i8* nocapture, i8) nounwind
49
; Atomic add of constant 2 to an i8, result unused: expect "addb $2".
define void @add7(i8* nocapture %p) nounwind ssp {
entry:
; CHECK: add7:
; CHECK: addb $2
  %0 = tail call i8 @llvm.atomic.load.add.i8.p0i8(i8* %p, i8 2) ; <i8> [#uses=0]
  ret void
}
57
; Atomic add of a truncated i32 amount to an i8, result unused:
; expect a register-form "addb".
define void @add3(i8* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: add3:
; CHECK: addb
  %0 = trunc i32 %v to i8 ; <i8> [#uses=1]
  %1 = tail call i8 @llvm.atomic.load.add.i8.p0i8(i8* %p, i8 %0) ; <i8> [#uses=0]
  ret void
}
66
; Atomic add of constant 1 to an i16, result unused: expect "incw".
define void @inc2(i16* nocapture %p) nounwind ssp {
entry:
; CHECK: inc2:
; CHECK: incw
  %0 = tail call i16 @llvm.atomic.load.add.i16.p0i16(i16* %p, i16 1) ; <i16> [#uses=0]
  ret void
}

; Legacy atomic add intrinsic, i16 variant.
declare i16 @llvm.atomic.load.add.i16.p0i16(i16* nocapture, i16) nounwind
76
; Atomic add of constant 2 to an i16, result unused: expect "addw $2".
define void @add6(i16* nocapture %p) nounwind ssp {
entry:
; CHECK: add6:
; CHECK: addw $2
  %0 = tail call i16 @llvm.atomic.load.add.i16.p0i16(i16* %p, i16 2) ; <i16> [#uses=0]
  ret void
}
84
; Atomic add of a truncated i32 amount to an i16, result unused:
; expect a register-form "addw".
define void @add2(i16* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: add2:
; CHECK: addw
  %0 = trunc i32 %v to i16 ; <i16> [#uses=1]
  %1 = tail call i16 @llvm.atomic.load.add.i16.p0i16(i16* %p, i16 %0) ; <i16> [#uses=0]
  ret void
}
93
; Atomic add of constant 1 to an i32, result unused: expect "incl".
define void @inc1(i32* nocapture %p) nounwind ssp {
entry:
; CHECK: inc1:
; CHECK: incl
  %0 = tail call i32 @llvm.atomic.load.add.i32.p0i32(i32* %p, i32 1) ; <i32> [#uses=0]
  ret void
}

; Legacy atomic add intrinsic, i32 variant.
declare i32 @llvm.atomic.load.add.i32.p0i32(i32* nocapture, i32) nounwind
103
; Atomic add of constant 2 to an i32, result unused: expect "addl $2".
define void @add5(i32* nocapture %p) nounwind ssp {
entry:
; CHECK: add5:
; CHECK: addl $2
  %0 = tail call i32 @llvm.atomic.load.add.i32.p0i32(i32* %p, i32 2) ; <i32> [#uses=0]
  ret void
}
111
; Atomic add of a variable i32 amount, result unused: expect a
; register-form "addl".
define void @add1(i32* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: add1:
; CHECK: addl
  %0 = tail call i32 @llvm.atomic.load.add.i32.p0i32(i32* %p, i32 %v) ; <i32> [#uses=0]
  ret void
}
119
; Atomic subtract of constant 1 from an i64, result unused: expect "decq".
define void @dec4(i64* nocapture %p) nounwind ssp {
entry:
; CHECK: dec4:
; CHECK: decq
  %0 = tail call i64 @llvm.atomic.load.sub.i64.p0i64(i64* %p, i64 1) ; <i64> [#uses=0]
  ret void
}

; Legacy atomic subtract intrinsic, i64 variant.
declare i64 @llvm.atomic.load.sub.i64.p0i64(i64* nocapture, i64) nounwind
129
; Atomic subtract of constant 2 from an i64, result unused: expect "subq $2".
define void @sub8(i64* nocapture %p) nounwind ssp {
entry:
; CHECK: sub8:
; CHECK: subq $2
  %0 = tail call i64 @llvm.atomic.load.sub.i64.p0i64(i64* %p, i64 2) ; <i64> [#uses=0]
  ret void
}
137
; Atomic subtract of a sign-extended i32 amount from an i64, result unused:
; expect a register-form "subq".
define void @sub4(i64* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: sub4:
; CHECK: subq
  %0 = sext i32 %v to i64 ; <i64> [#uses=1]
  %1 = tail call i64 @llvm.atomic.load.sub.i64.p0i64(i64* %p, i64 %0) ; <i64> [#uses=0]
  ret void
}
146
; Atomic subtract of constant 1 from an i8, result unused: expect "decb".
define void @dec3(i8* nocapture %p) nounwind ssp {
entry:
; CHECK: dec3:
; CHECK: decb
  %0 = tail call i8 @llvm.atomic.load.sub.i8.p0i8(i8* %p, i8 1) ; <i8> [#uses=0]
  ret void
}

; Legacy atomic subtract intrinsic, i8 variant.
declare i8 @llvm.atomic.load.sub.i8.p0i8(i8* nocapture, i8) nounwind
156
; Atomic subtract of constant 2 from an i8, result unused: expect "subb $2".
define void @sub7(i8* nocapture %p) nounwind ssp {
entry:
; CHECK: sub7:
; CHECK: subb $2
  %0 = tail call i8 @llvm.atomic.load.sub.i8.p0i8(i8* %p, i8 2) ; <i8> [#uses=0]
  ret void
}
164
; Atomic subtract of a truncated i32 amount from an i8, result unused:
; expect a register-form "subb".
define void @sub3(i8* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: sub3:
; CHECK: subb
  %0 = trunc i32 %v to i8 ; <i8> [#uses=1]
  %1 = tail call i8 @llvm.atomic.load.sub.i8.p0i8(i8* %p, i8 %0) ; <i8> [#uses=0]
  ret void
}
173
; Atomic subtract of constant 1 from an i16, result unused: expect "decw".
define void @dec2(i16* nocapture %p) nounwind ssp {
entry:
; CHECK: dec2:
; CHECK: decw
  %0 = tail call i16 @llvm.atomic.load.sub.i16.p0i16(i16* %p, i16 1) ; <i16> [#uses=0]
  ret void
}

; Legacy atomic subtract intrinsic, i16 variant.
declare i16 @llvm.atomic.load.sub.i16.p0i16(i16* nocapture, i16) nounwind
183
; Atomic subtract of constant 2 from an i16, result unused: expect "subw $2".
define void @sub6(i16* nocapture %p) nounwind ssp {
entry:
; CHECK: sub6:
; CHECK: subw $2
  %0 = tail call i16 @llvm.atomic.load.sub.i16.p0i16(i16* %p, i16 2) ; <i16> [#uses=0]
  ret void
}
191
; Atomic subtract of a truncated i32 amount from an i16, result unused:
; expect a register-form "subw".
define void @sub2(i16* nocapture %p, i32 %v) nounwind ssp {
entry:
; CHECK: sub2:
; CHECK: subw
  %0 = trunc i32 %v to i16 ; <i16> [#uses=1]
  %1 = tail call i16 @llvm.atomic.load.sub.i16.p0i16(i16* %p, i16 %0) ; <i16> [#uses=0]
  ret void
}
200
; Atomic subtract of constant 1 from an i32, result unused: expect "decl".
define void @dec1(i32* nocapture %p) nounwind ssp {
entry:
; CHECK: dec1:
; CHECK: decl
  %0 = tail call i32 @llvm.atomic.load.sub.i32.p0i32(i32* %p, i32 1) ; <i32> [#uses=0]
  ret void
}

; Legacy atomic subtract intrinsic, i32 variant (also used by @sub1 above).
declare i32 @llvm.atomic.load.sub.i32.p0i32(i32* nocapture, i32) nounwind
210
; Atomic subtract of constant 2 from an i32, result unused: expect "subl $2".
define void @sub5(i32* nocapture %p) nounwind ssp {
entry:
; CHECK: sub5:
; CHECK: subl $2
  %0 = tail call i32 @llvm.atomic.load.sub.i32.p0i32(i32* %p, i32 2) ; <i32> [#uses=0]
  ret void
}