; RUN: llc -march=r600 -mcpu=redwood < %s | FileCheck -check-prefix=EG -check-prefix=FUNC %s
; RUN: llc -march=r600 -mcpu=verde -verify-machineinstrs < %s | FileCheck -check-prefix=SI -check-prefix=FUNC %s

; FUNC-LABEL: @test2
; EG: AND_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}}
; EG: AND_INT {{\*? *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}}

; SI: V_AND_B32_e32 v{{[0-9]+, v[0-9]+, v[0-9]+}}
; SI: V_AND_B32_e32 v{{[0-9]+, v[0-9]+, v[0-9]+}}

define void @test2(<2 x i32> addrspace(1)* %out, <2 x i32> addrspace(1)* %in) {
  ; Second operand lives one <2 x i32> element past %in.
  %in.b = getelementptr <2 x i32> addrspace(1)* %in, i32 1
  %lhs = load <2 x i32> addrspace(1)* %in
  %rhs = load <2 x i32> addrspace(1)* %in.b
  %anded = and <2 x i32> %lhs, %rhs
  store <2 x i32> %anded, <2 x i32> addrspace(1)* %out
  ret void
}

; FUNC-LABEL: @test4
; EG: AND_INT {{\** *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}}
; EG: AND_INT {{\** *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}}
; EG: AND_INT {{\** *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}}
; EG: AND_INT {{\** *}}T{{[0-9]+\.[XYZW], T[0-9]+\.[XYZW], T[0-9]+\.[XYZW]}}

; SI: V_AND_B32_e32 v{{[0-9]+, v[0-9]+, v[0-9]+}}
; SI: V_AND_B32_e32 v{{[0-9]+, v[0-9]+, v[0-9]+}}
; SI: V_AND_B32_e32 v{{[0-9]+, v[0-9]+, v[0-9]+}}
; SI: V_AND_B32_e32 v{{[0-9]+, v[0-9]+, v[0-9]+}}

define void @test4(<4 x i32> addrspace(1)* %out, <4 x i32> addrspace(1)* %in) {
  ; Second operand lives one <4 x i32> element past %in.
  %in.b = getelementptr <4 x i32> addrspace(1)* %in, i32 1
  %lhs = load <4 x i32> addrspace(1)* %in
  %rhs = load <4 x i32> addrspace(1)* %in.b
  %anded = and <4 x i32> %lhs, %rhs
  store <4 x i32> %anded, <4 x i32> addrspace(1)* %out
  ret void
}

; FUNC-LABEL: @s_and_i32
; SI: S_AND_B32
define void @s_and_i32(i32 addrspace(1)* %out, i32 %a, i32 %b) {
  ; Both operands are kernel arguments, so this should select a scalar AND.
  %masked = and i32 %a, %b
  store i32 %masked, i32 addrspace(1)* %out, align 4
  ret void
}

; FUNC-LABEL: @s_and_constant_i32
; SI: S_AND_B32 s{{[0-9]+}}, s{{[0-9]+}}, 0x12d687
define void @s_and_constant_i32(i32 addrspace(1)* %out, i32 %a) {
  ; 1234567 = 0x12d687, too wide for an inline constant.
  %masked = and i32 %a, 1234567
  store i32 %masked, i32 addrspace(1)* %out, align 4
  ret void
}

; FUNC-LABEL: @v_and_i32
; SI: V_AND_B32
define void @v_and_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr, i32 addrspace(1)* %bptr) {
  ; Loaded values end up in VGPRs, so this should select a vector AND.
  %lhs = load i32 addrspace(1)* %aptr, align 4
  %rhs = load i32 addrspace(1)* %bptr, align 4
  %masked = and i32 %lhs, %rhs
  store i32 %masked, i32 addrspace(1)* %out, align 4
  ret void
}

; FUNC-LABEL: @v_and_constant_i32
; SI: V_AND_B32
define void @v_and_constant_i32(i32 addrspace(1)* %out, i32 addrspace(1)* %aptr) {
  %val = load i32 addrspace(1)* %aptr, align 4
  %masked = and i32 %val, 1234567
  store i32 %masked, i32 addrspace(1)* %out, align 4
  ret void
}

; FUNC-LABEL: @s_and_i64
; SI: S_AND_B64
define void @s_and_i64(i64 addrspace(1)* %out, i64 %a, i64 %b) {
  ; 64-bit scalar operands should use the 64-bit scalar AND.
  %masked = and i64 %a, %b
  store i64 %masked, i64 addrspace(1)* %out, align 8
  ret void
}

; FIXME: Should use SGPRs
; FUNC-LABEL: @s_and_i1
; SI: V_AND_B32
define void @s_and_i1(i1 addrspace(1)* %out, i1 %a, i1 %b) {
  %masked = and i1 %a, %b
  store i1 %masked, i1 addrspace(1)* %out
  ret void
}

; FUNC-LABEL: @s_and_constant_i64
; SI: S_AND_B64
define void @s_and_constant_i64(i64 addrspace(1)* %out, i64 %a) {
  ; 281474976710655 = 0xffffffffffff (low 48 bits set).
  %masked = and i64 %a, 281474976710655
  store i64 %masked, i64 addrspace(1)* %out, align 8
  ret void
}

; FUNC-LABEL: @v_and_i64
; SI: V_AND_B32
; SI: V_AND_B32
define void @v_and_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 addrspace(1)* %bptr) {
  ; A 64-bit VALU and is expanded into two 32-bit V_AND_B32s.
  %lhs = load i64 addrspace(1)* %aptr, align 8
  %rhs = load i64 addrspace(1)* %bptr, align 8
  %masked = and i64 %lhs, %rhs
  store i64 %masked, i64 addrspace(1)* %out, align 8
  ret void
}

; FUNC-LABEL: @v_and_i64_br
; SI: V_AND_B32
; SI: V_AND_B32
define void @v_and_i64_br(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr, i64 addrspace(1)* %bptr, i32 %cond) {
entry:
  %take.if = icmp eq i32 %cond, 0
  br i1 %take.if, label %if, label %endif

if:
  %lhs = load i64 addrspace(1)* %aptr, align 8
  %rhs = load i64 addrspace(1)* %bptr, align 8
  %anded = and i64 %lhs, %rhs
  br label %endif

endif:
  ; Zero flows through when the branch skipped the AND.
  %merged = phi i64 [%anded, %if], [0, %entry]
  store i64 %merged, i64 addrspace(1)* %out, align 8
  ret void
}

; FUNC-LABEL: @v_and_constant_i64
; SI: V_AND_B32
; SI: V_AND_B32
define void @v_and_constant_i64(i64 addrspace(1)* %out, i64 addrspace(1)* %aptr) {
  %val = load i64 addrspace(1)* %aptr, align 8
  %masked = and i64 %val, 1234567
  store i64 %masked, i64 addrspace(1)* %out, align 8
  ret void
}