; RUN: llc < %s -march=sparc | FileCheck %s

; Simple add with an immediate: should select SPARC `add` with the
; constant folded into the immediate field.
define i32 @test0(i32 %X) {
  %tmp.1 = add i32 %X, 1
  ret i32 %tmp.1
; CHECK-LABEL: test0:
; CHECK: add %o0, 1, %o0
}


;; xnor tests.
; (X ^ Y) ^ -1 should fold into a single SPARC `xnor` instruction.
define i32 @test1(i32 %X, i32 %Y) {
  %A = xor i32 %X, %Y
  %B = xor i32 %A, -1
  ret i32 %B
; CHECK-LABEL: test1:
; CHECK: xnor %o0, %o1, %o0
}

; Same fold with the `not` on the first operand: (X ^ -1) ^ Y is still
; a single `xnor`.
define i32 @test2(i32 %X, i32 %Y) {
  %A = xor i32 %X, -1
  %B = xor i32 %A, %Y
  ret i32 %B
; CHECK-LABEL: test2:
; CHECK: xnor %o0, %o1, %o0
}

; Storing constant zero should use the hardwired zero register %g0
; rather than materializing 0 in a scratch register.
; CHECK-LABEL: store_zero:
; CHECK: st %g0, [%o0]
; CHECK: st %g0, [%o1+4]
define i32 @store_zero(i32* %a, i32* %b) {
entry:
  store i32 0, i32* %a, align 4
  %0 = getelementptr inbounds i32, i32* %b, i32 1
  store i32 0, i32* %0, align 4
  ret i32 0
}

; Signed 32-bit division: the Y register must be written with the
; sign-extension of the dividend (via `sra` then `wr ... %y`) before `sdiv`.
; CHECK-LABEL: signed_divide:
; CHECK: sra %o0, 31, %o2
; CHECK: wr %g0, %o2, %y
; CHECK: sdiv %o0, %o1, %o0
define i32 @signed_divide(i32 %a, i32 %b) {
  %r = sdiv i32 %a, %b
  ret i32 %r
}

; Unsigned 32-bit division: Y register is zeroed (wr %g0, %g0, %y)
; before `udiv`.
; CHECK-LABEL: unsigned_divide:
; CHECK: wr %g0, %g0, %y
; CHECK: udiv %o0, %o1, %o0
define i32 @unsigned_divide(i32 %a, i32 %b) {
  %r = udiv i32 %a, %b
  ret i32 %r
}

; 32x32->32 multiply selects a single `smul` (low 32 bits are the same
; for signed and unsigned).
; CHECK-LABEL: multiply_32x32:
; CHECK: smul %o0, %o1, %o0
define i32 @multiply_32x32(i32 %a, i32 %b) {
  %r = mul i32 %a, %b
  ret i32 %r
}

; Widening signed multiply: `smul` produces the low word and leaves the
; high word in %y, read back with `rd %y`.
; CHECK-LABEL: signed_multiply_32x32_64:
; CHECK: smul %o0, %o1, %o1
; CHECK: rd %y, %o0
define i64 @signed_multiply_32x32_64(i32 %a, i32 %b) {
  %xa = sext i32 %a to i64
  %xb = sext i32 %b to i64
  %r = mul i64 %xa, %xb
  ret i64 %r
}

; Widening unsigned multiply: `umul` low word plus `rd %y` for the
; high word; `rd` cannot sit in the delay slot, hence retl + nop.
; CHECK-LABEL: unsigned_multiply_32x32_64:
; CHECK: umul %o0, %o1, %o1
; CHECK: rd %y, %o0
; CHECK: retl
; CHECK: nop
define i64 @unsigned_multiply_32x32_64(i32 %a, i32 %b) {
  %xa = zext i32 %a to i64
  %xb = zext i32 %b to i64
  %r = mul i64 %xa, %xb
  ret i64 %r
}

; 64-bit load/add/store on 32-bit SPARC: paired `ldd`/`std` for the
; i64 access, with the add split into addcc/addxcc for the carry.
; CHECK-LABEL: load_store_64bit:
; CHECK: ldd [%o0], %o2
; CHECK: addcc %o3, 3, %o5
; CHECK: addxcc %o2, 0, %o4
; CHECK: retl
; CHECK: std %o4, [%o1]
define void @load_store_64bit(i64* %x, i64* %y) {
entry:
  %0 = load i64, i64* %x
  %add = add nsw i64 %0, 3
  store i64 %add, i64* %y
  ret void
}