; Round-trip test: integer-overflow flags (nuw/nsw), the 'exact' flag, and
; 'inbounds' on GEPs must survive assembly -> bitcode -> disassembly, and
; must be printed in canonical order (nuw before nsw).
; RUN: llvm-as < %s | llvm-dis | FileCheck %s

@addr = external global i64

; nuw alone on each binary operator.
define i64 @add_unsigned(i64 %x, i64 %y) {
; CHECK: %z = add nuw i64 %x, %y
  %z = add nuw i64 %x, %y
  ret i64 %z
}

define i64 @sub_unsigned(i64 %x, i64 %y) {
; CHECK: %z = sub nuw i64 %x, %y
  %z = sub nuw i64 %x, %y
  ret i64 %z
}

define i64 @mul_unsigned(i64 %x, i64 %y) {
; CHECK: %z = mul nuw i64 %x, %y
  %z = mul nuw i64 %x, %y
  ret i64 %z
}

; nsw alone on each binary operator.
define i64 @add_signed(i64 %x, i64 %y) {
; CHECK: %z = add nsw i64 %x, %y
  %z = add nsw i64 %x, %y
  ret i64 %z
}

define i64 @sub_signed(i64 %x, i64 %y) {
; CHECK: %z = sub nsw i64 %x, %y
  %z = sub nsw i64 %x, %y
  ret i64 %z
}

define i64 @mul_signed(i64 %x, i64 %y) {
; CHECK: %z = mul nsw i64 %x, %y
  %z = mul nsw i64 %x, %y
  ret i64 %z
}

; No flags: nothing spurious may be printed.
define i64 @add_plain(i64 %x, i64 %y) {
; CHECK: %z = add i64 %x, %y
  %z = add i64 %x, %y
  ret i64 %z
}

define i64 @sub_plain(i64 %x, i64 %y) {
; CHECK: %z = sub i64 %x, %y
  %z = sub i64 %x, %y
  ret i64 %z
}

define i64 @mul_plain(i64 %x, i64 %y) {
; CHECK: %z = mul i64 %x, %y
  %z = mul i64 %x, %y
  ret i64 %z
}

; Both flags together, already in canonical order.
define i64 @add_both(i64 %x, i64 %y) {
; CHECK: %z = add nuw nsw i64 %x, %y
  %z = add nuw nsw i64 %x, %y
  ret i64 %z
}

define i64 @sub_both(i64 %x, i64 %y) {
; CHECK: %z = sub nuw nsw i64 %x, %y
  %z = sub nuw nsw i64 %x, %y
  ret i64 %z
}

define i64 @mul_both(i64 %x, i64 %y) {
; CHECK: %z = mul nuw nsw i64 %x, %y
  %z = mul nuw nsw i64 %x, %y
  ret i64 %z
}

; Flags written in reverse order ("nsw nuw") must be accepted on input and
; printed back in canonical "nuw nsw" order — do not "fix" the input lines.
define i64 @add_both_reversed(i64 %x, i64 %y) {
; CHECK: %z = add nuw nsw i64 %x, %y
  %z = add nsw nuw i64 %x, %y
  ret i64 %z
}

define i64 @sub_both_reversed(i64 %x, i64 %y) {
; CHECK: %z = sub nuw nsw i64 %x, %y
  %z = sub nsw nuw i64 %x, %y
  ret i64 %z
}

define i64 @mul_both_reversed(i64 %x, i64 %y) {
; CHECK: %z = mul nuw nsw i64 %x, %y
  %z = mul nsw nuw i64 %x, %y
  ret i64 %z
}

; shl also carries nuw/nsw.
define i64 @shl_both(i64 %x, i64 %y) {
; CHECK: %z = shl nuw nsw i64 %x, %y
  %z = shl nuw nsw i64 %x, %y
  ret i64 %z
}

; 'exact' flag on sdiv, and its absence.
define i64 @sdiv_exact(i64 %x, i64 %y) {
; CHECK: %z = sdiv exact i64 %x, %y
  %z = sdiv exact i64 %x, %y
  ret i64 %z
}

define i64 @sdiv_plain(i64 %x, i64 %y) {
; CHECK: %z = sdiv i64 %x, %y
  %z = sdiv i64 %x, %y
  ret i64 %z
}

; 'exact' flag on udiv, and its absence.
define i64 @udiv_exact(i64 %x, i64 %y) {
; CHECK: %z = udiv exact i64 %x, %y
  %z = udiv exact i64 %x, %y
  ret i64 %z
}

define i64 @udiv_plain(i64 %x, i64 %y) {
; CHECK: %z = udiv i64 %x, %y
  %z = udiv i64 %x, %y
  ret i64 %z
}

; 'exact' flag on ashr, and its absence.
define i64 @ashr_plain(i64 %x, i64 %y) {
; CHECK: %z = ashr i64 %x, %y
  %z = ashr i64 %x, %y
  ret i64 %z
}

define i64 @ashr_exact(i64 %x, i64 %y) {
; CHECK: %z = ashr exact i64 %x, %y
  %z = ashr exact i64 %x, %y
  ret i64 %z
}

; 'exact' flag on lshr, and its absence.
define i64 @lshr_plain(i64 %x, i64 %y) {
; CHECK: %z = lshr i64 %x, %y
  %z = lshr i64 %x, %y
  ret i64 %z
}

define i64 @lshr_exact(i64 %x, i64 %y) {
; CHECK: %z = lshr exact i64 %x, %y
  %z = lshr exact i64 %x, %y
  ret i64 %z
}

; 'inbounds' on getelementptr, and its absence.
; (Historical typed-pointer syntax "i64* %p" is intentional for this test.)
define i64* @gep_nw(i64* %p, i64 %x) {
; CHECK: %z = getelementptr inbounds i64* %p, i64 %x
  %z = getelementptr inbounds i64* %p, i64 %x
  ret i64* %z
}

define i64* @gep_plain(i64* %p, i64 %x) {
; CHECK: %z = getelementptr i64* %p, i64 %x
  %z = getelementptr i64* %p, i64 %x
  ret i64* %z
}

; Flags on constant expressions; reversed "nsw nuw" input must canonicalize.
define i64 @add_both_ce() {
; CHECK: ret i64 add nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_both_ce() {
; CHECK: ret i64 sub nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_both_ce() {
; CHECK: ret i64 mul nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

; 'exact' on division constant expressions.
define i64 @sdiv_exact_ce() {
; CHECK: ret i64 sdiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sdiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @udiv_exact_ce() {
; CHECK: ret i64 udiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 udiv exact (i64 ptrtoint (i64* @addr to i64), i64 91)
}

; 'exact' on shift constant expressions.
define i64 @ashr_exact_ce() {
; CHECK: ret i64 ashr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
  ret i64 ashr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
}

define i64 @lshr_exact_ce() {
; CHECK: ret i64 lshr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
  ret i64 lshr exact (i64 ptrtoint (i64* @addr to i64), i64 9)
}

; 'inbounds' on a getelementptr constant expression.
define i64* @gep_nw_ce() {
; CHECK: ret i64* getelementptr inbounds (i64* @addr, i64 171)
  ret i64* getelementptr inbounds (i64* @addr, i64 171)
}

; Flag-free constant expressions print no flags.
define i64 @add_plain_ce() {
; CHECK: ret i64 add (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_plain_ce() {
; CHECK: ret i64 sub (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_plain_ce() {
; CHECK: ret i64 mul (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul (i64 ptrtoint (i64* @addr to i64), i64 91)
}

; Flag-free sdiv and getelementptr constant expressions.
define i64 @sdiv_plain_ce() {
; CHECK: ret i64 sdiv (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sdiv (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64* @gep_plain_ce() {
; CHECK: ret i64* getelementptr (i64* @addr, i64 171)
  ret i64* getelementptr (i64* @addr, i64 171)
}

; Reversed "nsw nuw" on constant expressions canonicalizes to "nuw nsw".
define i64 @add_both_reversed_ce() {
; CHECK: ret i64 add nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_both_reversed_ce() {
; CHECK: ret i64 sub nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_both_reversed_ce() {
; CHECK: ret i64 mul nuw nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nsw nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

; nsw alone on constant expressions.
define i64 @add_signed_ce() {
; CHECK: ret i64 add nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_signed_ce() {
; CHECK: ret i64 sub nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_signed_ce() {
; CHECK: ret i64 mul nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nsw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

; nsw on a shl constant expression.
define i64 @shl_signed_ce() {
; CHECK: ret i64 shl nsw (i64 ptrtoint (i64* @addr to i64), i64 17)
  ret i64 shl nsw (i64 ptrtoint (i64* @addr to i64), i64 17)
}


; nuw alone on constant expressions.
define i64 @add_unsigned_ce() {
; CHECK: ret i64 add nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 add nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @sub_unsigned_ce() {
; CHECK: ret i64 sub nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 sub nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}

define i64 @mul_unsigned_ce() {
; CHECK: ret i64 mul nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
  ret i64 mul nuw (i64 ptrtoint (i64* @addr to i64), i64 91)
}
