; RUN: llc < %s -march=mips -mcpu=mips2 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=NOT-R2-R6 -check-prefix=GP32
; RUN: llc < %s -march=mips -mcpu=mips32 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=NOT-R2-R6 -check-prefix=GP32
; RUN: llc < %s -march=mips -mcpu=mips32r2 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP32
; RUN: llc < %s -march=mips -mcpu=mips32r3 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP32
; RUN: llc < %s -march=mips -mcpu=mips32r5 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP32
; RUN: llc < %s -march=mips -mcpu=mips32r6 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP32
; RUN: llc < %s -march=mips64 -mcpu=mips3 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=NOT-R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips64 -mcpu=mips4 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=NOT-R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips64 -mcpu=mips64 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=NOT-R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips64 -mcpu=mips64r2 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips64 -mcpu=mips64r3 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips64 -mcpu=mips64r5 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips64 -mcpu=mips64r6 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=R2-R6 -check-prefix=GP64
; RUN: llc < %s -march=mips -mcpu=mips32r3 -mattr=+micromips -O2 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=MMR6 -check-prefix=MM32
; RUN: llc < %s -march=mips -mcpu=mips32r6 -mattr=+micromips -O2 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=MMR6 -check-prefix=MM32
; RUN: llc < %s -march=mips -mcpu=mips64r6 -mattr=+micromips -O2 | FileCheck %s \
; RUN: -check-prefix=ALL -check-prefix=MMR6 -check-prefix=MM64

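; This file checks that scalar 'add' with i1, i8, i16, i32, i64 and i128
; operands, both register-register and with small immediates (4 and 3), is
; lowered to the expected addu/daddu/addiu sequences, including the seb/seh
; sign extensions and the microMIPS forms (addu16, addiur2, addius5, li16)
; selected by the ISA revisions exercised in the RUN lines above.
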
define signext i1 @add_i1(i1 signext %a, i1 signext %b) {
entry:
; ALL-LABEL: add_i1:

  ; NOT-R2-R6: addu $[[T0:[0-9]+]], $4, $5
  ; NOT-R2-R6: sll $[[T0]], $[[T0]], 31
  ; NOT-R2-R6: sra $2, $[[T0]], 31

  ; R2-R6: addu $[[T0:[0-9]+]], $4, $5
  ; R2-R6: sll $[[T0]], $[[T0]], 31
  ; R2-R6: sra $2, $[[T0]], 31

  ; MMR6: addu16 $[[T0:[0-9]+]], $4, $5
  ; MMR6: sll $[[T1:[0-9]+]], $[[T0]], 31
  ; MMR6: sra $2, $[[T1]], 31

  %r = add i1 %a, %b
  ret i1 %r
}

define signext i8 @add_i8(i8 signext %a, i8 signext %b) {
entry:
; ALL-LABEL: add_i8:

  ; NOT-R2-R6: addu $[[T0:[0-9]+]], $4, $5
  ; NOT-R2-R6: sll $[[T0]], $[[T0]], 24
  ; NOT-R2-R6: sra $2, $[[T0]], 24

  ; R2-R6: addu $[[T0:[0-9]+]], $4, $5
  ; R2-R6: seb $2, $[[T0]]

  ; MMR6: addu16 $[[T0:[0-9]+]], $4, $5
  ; MMR6: seb $2, $[[T0]]

  %r = add i8 %a, %b
  ret i8 %r
}

define signext i16 @add_i16(i16 signext %a, i16 signext %b) {
entry:
; ALL-LABEL: add_i16:

  ; NOT-R2-R6: addu $[[T0:[0-9]+]], $4, $5
  ; NOT-R2-R6: sll $[[T0]], $[[T0]], 16
  ; NOT-R2-R6: sra $2, $[[T0]], 16

  ; R2-R6: addu $[[T0:[0-9]+]], $4, $5
  ; R2-R6: seh $2, $[[T0]]

  ; MMR6: addu16 $[[T0:[0-9]+]], $4, $5
  ; MMR6: seh $2, $[[T0]]

  %r = add i16 %a, %b
  ret i16 %r
}

define signext i32 @add_i32(i32 signext %a, i32 signext %b) {
entry:
; ALL-LABEL: add_i32:

  ; NOT-R2-R6: addu $2, $4, $5
  ; R2-R6: addu $2, $4, $5

  ; MMR6: addu16 $[[T0:[0-9]+]], $4, $5

  %r = add i32 %a, %b
  ret i32 %r
}

define signext i64 @add_i64(i64 signext %a, i64 signext %b) {
entry:
; ALL-LABEL: add_i64:

  ; GP32: addu $3, $5, $7
  ; GP32: sltu $[[T0:[0-9]+]], $3, $7
  ; GP32: addu $[[T1:[0-9]+]], $[[T0]], $6
  ; GP32: addu $2, $4, $[[T1]]

  ; GP64: daddu $2, $4, $5

  ; MM32: addu $3, $5, $7
  ; MM32: sltu $[[T0:[0-9]+]], $3, $7
  ; MM32: addu $[[T1:[0-9]+]], $[[T0]], $6
  ; MM32: addu $2, $4, $[[T1]]

  ; MM64: daddu $2, $4, $5

  %r = add i64 %a, %b
  ret i64 %r
}

define signext i128 @add_i128(i128 signext %a, i128 signext %b) {
entry:
; ALL-LABEL: add_i128:

  ; GP32: lw $[[T0:[0-9]+]], 28($sp)
  ; GP32: addu $[[T1:[0-9]+]], $7, $[[T0]]
  ; GP32: sltu $[[T2:[0-9]+]], $[[T1]], $[[T0]]
  ; GP32: lw $[[T3:[0-9]+]], 24($sp)
  ; GP32: addu $[[T4:[0-9]+]], $[[T2]], $[[T3]]
  ; GP32: addu $[[T5:[0-9]+]], $6, $[[T4]]
  ; GP32: lw $[[T6:[0-9]+]], 16($sp)
  ; GP32: lw $[[T7:[0-9]+]], 20($sp)
  ; GP32: sltu $[[T8:[0-9]+]], $[[T5]], $[[T3]]
  ; GP32: addu $[[T9:[0-9]+]], $[[T8]], $[[T7]]
  ; GP32: addu $3, $5, $[[T8]]
  ; GP32: sltu $[[T10:[0-9]+]], $3, $[[T7]]
  ; GP32: addu $[[T11:[0-9]+]], $[[T10]], $[[T6]]
  ; GP32: addu $2, $4, $[[T11]]
  ; GP32: move $4, $[[T5]]
  ; GP32: move $5, $[[T1]]

  ; GP64: daddu $3, $5, $7
  ; GP64: sltu $[[T0:[0-9]+]], $3, $7
  ; GP64: daddu $[[T1:[0-9]+]], $[[T0]], $6
  ; GP64: daddu $2, $4, $[[T1]]

  ; MM32: lw $[[T0:[0-9]+]], 28($sp)
  ; MM32: addu $[[T1:[0-9]+]], $7, $[[T0]]
  ; MM32: sltu $[[T2:[0-9]+]], $[[T1]], $[[T0]]
  ; MM32: lw $[[T3:[0-9]+]], 24($sp)
  ; MM32: addu $[[T4:[0-9]+]], $[[T2]], $[[T3]]
  ; MM32: addu $[[T5:[0-9]+]], $6, $[[T4]]
  ; MM32: sltu $[[T6:[0-9]+]], $[[T5]], $[[T3]]
  ; MM32: lw $[[T7:[0-9]+]], 20($sp)
  ; MM32: addu $[[T8:[0-9]+]], $[[T6]], $[[T7]]
  ; MM32: addu $[[T9:[0-9]+]], $5, $[[T8]]
  ; MM32: lw $[[T10:[0-9]+]], 16($sp)
  ; MM32: sltu $[[T11:[0-9]+]], $[[T9]], $[[T7]]
  ; MM32: addu $[[T12:[0-9]+]], $[[T11]], $[[T10]]
  ; MM32: addu $[[T13:[0-9]+]], $4, $[[T12]]
  ; MM32: move $4, $[[T5]]
  ; MM32: move $5, $[[T1]]

  ; MM64: daddu $3, $5, $7
  ; MM64: sltu $[[T0:[0-9]+]], $3, $7
  ; MM64: daddu $[[T1:[0-9]+]], $[[T0]], $6
  ; MM64: daddu $2, $4, $[[T1]]

  %r = add i128 %a, %b
  ret i128 %r
}
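
; The remaining functions check 'add' of a small constant (4, then 3) with a
; register operand, where R2-R6 targets can fold the immediate into addiu
; followed by seb/seh, and microMIPS targets can use addiur2 or addius5.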

define signext i1 @add_i1_4(i1 signext %a) {
; ALL-LABEL: add_i1_4:

  ; ALL: move $2, $4

  %r = add i1 4, %a
  ret i1 %r
}

define signext i8 @add_i8_4(i8 signext %a) {
; ALL-LABEL: add_i8_4:

  ; NOT-R2-R6: sll $[[T0:[0-9]+]], $4, 24
  ; NOT-R2-R6: lui $[[T1:[0-9]+]], 1024
  ; NOT-R2-R6: addu $[[T0]], $[[T0]], $[[T1]]
  ; NOT-R2-R6: sra $2, $[[T0]], 24

  ; R2-R6: addiu $[[T0:[0-9]+]], $4, 4
  ; R2-R6: seb $2, $[[T0]]

  ; MM32: addiur2 $[[T0:[0-9]+]], $4, 4
  ; MM32: seb $2, $[[T0]]

  ; MM64: addiur2 $[[T0:[0-9]+]], $4, 4
  ; MM64: seb $2, $[[T0]]

  %r = add i8 4, %a
  ret i8 %r
}

define signext i16 @add_i16_4(i16 signext %a) {
; ALL-LABEL: add_i16_4:

  ; NOT-R2-R6: sll $[[T0:[0-9]+]], $4, 16
  ; NOT-R2-R6: lui $[[T1:[0-9]+]], 4
  ; NOT-R2-R6: addu $[[T0]], $[[T0]], $[[T1]]
  ; NOT-R2-R6: sra $2, $[[T0]], 16

  ; R2-R6: addiu $[[T0:[0-9]+]], $4, 4
  ; R2-R6: seh $2, $[[T0]]

  ; MM32: addiur2 $[[T0:[0-9]+]], $4, 4
  ; MM32: seh $2, $[[T0]]

  ; MM64: addiur2 $[[T0:[0-9]+]], $4, 4
  ; MM64: seh $2, $[[T0]]

  %r = add i16 4, %a
  ret i16 %r
}

define signext i32 @add_i32_4(i32 signext %a) {
; ALL-LABEL: add_i32_4:

  ; GP32: addiu $2, $4, 4

  ; GP64: addiu $2, $4, 4

  ; MM32: addiur2 $2, $4, 4

  ; MM64: addiur2 $2, $4, 4

  %r = add i32 4, %a
  ret i32 %r
}

define signext i64 @add_i64_4(i64 signext %a) {
; ALL-LABEL: add_i64_4:

  ; GP32: addiu $[[T0:[0-9]+]], $5, 4
  ; GP32: addiu $[[T1:[0-9]+]], $zero, 4
  ; GP32: sltu $[[T1]], $[[T0]], $[[T1]]
  ; GP32: addu $2, $4, $[[T1]]

  ; GP64: daddiu $2, $4, 4

  ; MM32: addiu $[[T0:[0-9]+]], $5, 4
  ; MM32: li16 $[[T1:[0-9]+]], 4
  ; MM32: sltu $[[T2:[0-9]+]], $[[T0]], $[[T1]]
  ; MM32: addu $2, $4, $[[T2]]

  ; MM64: daddiu $2, $4, 4

  %r = add i64 4, %a
  ret i64 %r
}

define signext i128 @add_i128_4(i128 signext %a) {
; ALL-LABEL: add_i128_4:

  ; GP32: addiu $[[T0:[0-9]+]], $7, 4
  ; GP32: addiu $[[T1:[0-9]+]], $zero, 4
  ; GP32: sltu $[[T1]], $[[T0]], $[[T1]]
  ; GP32: addu $[[T2:[0-9]+]], $6, $[[T1]]
  ; GP32: sltu $[[T1]], $[[T2]], $zero
  ; GP32: addu $[[T3:[0-9]+]], $5, $[[T1]]
  ; GP32: sltu $[[T1]], $[[T3]], $zero
  ; GP32: addu $[[T1]], $4, $[[T1]]
  ; GP32: move $4, $[[T2]]
  ; GP32: move $5, $[[T0]]

  ; GP64: daddiu $[[T0:[0-9]+]], $5, 4
  ; GP64: daddiu $[[T1:[0-9]+]], $zero, 4
  ; GP64: sltu $[[T1]], $[[T0]], $[[T1]]
  ; GP64: daddu $2, $4, $[[T1]]

  ; MM32: addiu $[[T0:[0-9]+]], $7, 4
  ; MM32: li16 $[[T1:[0-9]+]], 4
  ; MM32: sltu $[[T1]], $[[T0]], $[[T1]]
  ; MM32: addu $[[T2:[0-9]+]], $6, $[[T1]]
  ; MM32: lui $[[T1]], 0
  ; MM32: sltu $[[T3:[0-9]+]], $[[T2]], $[[T1]]
  ; MM32: addu $[[T3]], $5, $[[T3]]
  ; MM32: sltu $[[T1]], $[[T3]], $[[T1]]
  ; MM32: addu $[[T1]], $4, $[[T1]]
  ; MM32: move $4, $[[T2]]
  ; MM32: move $5, $[[T0]]

  ; MM64: daddiu $[[T0:[0-9]+]], $5, 4
  ; MM64: daddiu $[[T1:[0-9]+]], $zero, 4
  ; MM64: sltu $[[T1]], $[[T0]], $[[T1]]
  ; MM64: daddu $2, $4, $[[T1]]

  %r = add i128 4, %a
  ret i128 %r
}

define signext i1 @add_i1_3(i1 signext %a) {
; ALL-LABEL: add_i1_3:

  ; ALL: sll $[[T0:[0-9]+]], $4, 31
  ; ALL: lui $[[T1:[0-9]+]], 32768

  ; GP32: addu $[[T0]], $[[T0]], $[[T1]]
  ; GP32: sra $[[T1]], $[[T0]], 31

  ; GP64: addu $[[T0]], $[[T0]], $[[T1]]
  ; GP64: sra $[[T1]], $[[T0]], 31

  ; MMR6: addu16 $[[T0]], $[[T0]], $[[T1]]
  ; MMR6: sra $[[T0]], $[[T0]], 31

  %r = add i1 3, %a
  ret i1 %r
}

define signext i8 @add_i8_3(i8 signext %a) {
; ALL-LABEL: add_i8_3:

  ; NOT-R2-R6: sll $[[T0:[0-9]+]], $4, 24
  ; NOT-R2-R6: lui $[[T1:[0-9]+]], 768
  ; NOT-R2-R6: addu $[[T0]], $[[T0]], $[[T1]]
  ; NOT-R2-R6: sra $2, $[[T0]], 24

  ; R2-R6: addiu $[[T0:[0-9]+]], $4, 3
  ; R2-R6: seb $2, $[[T0]]

  ; MMR6: addius5 $[[T0:[0-9]+]], 3
  ; MMR6: seb $2, $[[T0]]

  %r = add i8 3, %a
  ret i8 %r
}

define signext i16 @add_i16_3(i16 signext %a) {
; ALL-LABEL: add_i16_3:

  ; NOT-R2-R6: sll $[[T0:[0-9]+]], $4, 16
  ; NOT-R2-R6: lui $[[T1:[0-9]+]], 3
  ; NOT-R2-R6: addu $[[T0]], $[[T0]], $[[T1]]
  ; NOT-R2-R6: sra $2, $[[T0]], 16

  ; R2-R6: addiu $[[T0:[0-9]+]], $4, 3
  ; R2-R6: seh $2, $[[T0]]

  ; MMR6: addius5 $[[T0:[0-9]+]], 3
  ; MMR6: seh $2, $[[T0]]

  %r = add i16 3, %a
  ret i16 %r
}

define signext i32 @add_i32_3(i32 signext %a) {
; ALL-LABEL: add_i32_3:

  ; NOT-R2-R6: addiu $2, $4, 3

  ; R2-R6: addiu $2, $4, 3

  ; MMR6: addius5 $[[T0:[0-9]+]], 3
  ; MMR6: move $2, $[[T0]]

  %r = add i32 3, %a
  ret i32 %r
}

define signext i64 @add_i64_3(i64 signext %a) {
; ALL-LABEL: add_i64_3:

  ; GP32: addiu $[[T0:[0-9]+]], $5, 3
  ; GP32: addiu $[[T1:[0-9]+]], $zero, 3
  ; GP32: sltu $[[T1]], $[[T0]], $[[T1]]
  ; GP32: addu $2, $4, $[[T1]]

  ; GP64: daddiu $2, $4, 3

  ; MM32: addiu $[[T0:[0-9]+]], $5, 3
  ; MM32: li16 $[[T1:[0-9]+]], 3
  ; MM32: sltu $[[T2:[0-9]+]], $[[T0]], $[[T1]]
  ; MM32: addu $2, $4, $[[T2]]

  ; MM64: daddiu $2, $4, 3

  %r = add i64 3, %a
  ret i64 %r
}

define signext i128 @add_i128_3(i128 signext %a) {
; ALL-LABEL: add_i128_3:

  ; GP32: addiu $[[T0:[0-9]+]], $7, 3
  ; GP32: addiu $[[T1:[0-9]+]], $zero, 3
  ; GP32: sltu $[[T1]], $[[T0]], $[[T1]]
  ; GP32: addu $[[T2:[0-9]+]], $6, $[[T1]]
  ; GP32: sltu $[[T3:[0-9]+]], $[[T2]], $zero
  ; GP32: addu $[[T4:[0-9]+]], $5, $[[T3]]
  ; GP32: sltu $[[T5:[0-9]+]], $[[T4]], $zero
  ; GP32: addu $[[T5]], $4, $[[T5]]
  ; GP32: move $4, $[[T2]]
  ; GP32: move $5, $[[T0]]

  ; GP64: daddiu $[[T0:[0-9]+]], $5, 3
  ; GP64: daddiu $[[T1:[0-9]+]], $zero, 3
  ; GP64: sltu $[[T1]], $[[T0]], $[[T1]]
  ; GP64: daddu $2, $4, $[[T1]]

  ; MM32: addiu $[[T0:[0-9]+]], $7, 3
  ; MM32: li16 $[[T1:[0-9]+]], 3
  ; MM32: sltu $[[T1]], $[[T0]], $[[T1]]
  ; MM32: addu $[[T2:[0-9]+]], $6, $[[T1]]
  ; MM32: lui $[[T3:[0-9]+]], 0
  ; MM32: sltu $[[T4:[0-9]+]], $[[T2]], $[[T3]]
  ; MM32: addu $[[T4]], $5, $[[T4]]
  ; MM32: sltu $[[T5:[0-9]+]], $[[T4]], $[[T3]]
  ; MM32: addu $[[T5]], $4, $[[T5]]
  ; MM32: move $4, $[[T2]]
  ; MM32: move $5, $[[T0]]

  ; MM64: daddiu $[[T0:[0-9]+]], $5, 3
  ; MM64: daddiu $[[T1:[0-9]+]], $zero, 3
  ; MM64: sltu $[[T1]], $[[T0]], $[[T1]]
  ; MM64: daddu $2, $4, $[[T1]]

  %r = add i128 3, %a
  ret i128 %r
}