; RUN: llc < %s -march=arm -mattr=+neon | FileCheck %s

; This tests icmp operations that do not map directly to NEON instructions.
; Not-equal (ne) operations are implemented by VCEQ/VMVN. Less-than (lt/ult)
; and less-than-or-equal (le/ule) are implemented by swapping the arguments
; to VCGT and VCGE. Test all the operand types for not-equal but only sample
; the other operations.

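; For example, a sketch of the expected selection for the <8 x i8> cases
; (register assignments here are illustrative only; the CHECK lines below
; match mnemonics, not specific registers):
;   icmp ne  A, B  =>  vceq.i8 d16, d0, d1   ; d16 = (A == B) mask
;                      vmvn    d16, d16      ; invert the mask: (A != B)
;   icmp slt A, B  =>  vcgt.s8 d16, d1, d0   ; operands swapped: A < B iff B > A
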
define <8 x i8> @vcnei8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK: vcnei8:
;CHECK: vceq.i8
;CHECK-NEXT: vmvn
	%tmp1 = load <8 x i8>* %A
	%tmp2 = load <8 x i8>* %B
	%tmp3 = icmp ne <8 x i8> %tmp1, %tmp2
	%tmp4 = sext <8 x i1> %tmp3 to <8 x i8>
	ret <8 x i8> %tmp4
}

define <4 x i16> @vcnei16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vcnei16:
;CHECK: vceq.i16
;CHECK-NEXT: vmvn
	%tmp1 = load <4 x i16>* %A
	%tmp2 = load <4 x i16>* %B
	%tmp3 = icmp ne <4 x i16> %tmp1, %tmp2
	%tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
	ret <4 x i16> %tmp4
}

define <2 x i32> @vcnei32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK: vcnei32:
;CHECK: vceq.i32
;CHECK-NEXT: vmvn
	%tmp1 = load <2 x i32>* %A
	%tmp2 = load <2 x i32>* %B
	%tmp3 = icmp ne <2 x i32> %tmp1, %tmp2
	%tmp4 = sext <2 x i1> %tmp3 to <2 x i32>
	ret <2 x i32> %tmp4
}

define <16 x i8> @vcneQi8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vcneQi8:
;CHECK: vceq.i8
;CHECK-NEXT: vmvn
	%tmp1 = load <16 x i8>* %A
	%tmp2 = load <16 x i8>* %B
	%tmp3 = icmp ne <16 x i8> %tmp1, %tmp2
	%tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
	ret <16 x i8> %tmp4
}

define <8 x i16> @vcneQi16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK: vcneQi16:
;CHECK: vceq.i16
;CHECK-NEXT: vmvn
	%tmp1 = load <8 x i16>* %A
	%tmp2 = load <8 x i16>* %B
	%tmp3 = icmp ne <8 x i16> %tmp1, %tmp2
	%tmp4 = sext <8 x i1> %tmp3 to <8 x i16>
	ret <8 x i16> %tmp4
}

define <4 x i32> @vcneQi32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vcneQi32:
;CHECK: vceq.i32
;CHECK-NEXT: vmvn
	%tmp1 = load <4 x i32>* %A
	%tmp2 = load <4 x i32>* %B
	%tmp3 = icmp ne <4 x i32> %tmp1, %tmp2
	%tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
	ret <4 x i32> %tmp4
}

define <16 x i8> @vcltQs8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vcltQs8:
;CHECK: vcgt.s8
	%tmp1 = load <16 x i8>* %A
	%tmp2 = load <16 x i8>* %B
	%tmp3 = icmp slt <16 x i8> %tmp1, %tmp2
	%tmp4 = sext <16 x i1> %tmp3 to <16 x i8>
	ret <16 x i8> %tmp4
}

define <4 x i16> @vcles16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vcles16:
;CHECK: vcge.s16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = load <4 x i16>* %B
	%tmp3 = icmp sle <4 x i16> %tmp1, %tmp2
	%tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
	ret <4 x i16> %tmp4
}

define <4 x i16> @vcltu16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vcltu16:
;CHECK: vcgt.u16
	%tmp1 = load <4 x i16>* %A
	%tmp2 = load <4 x i16>* %B
	%tmp3 = icmp ult <4 x i16> %tmp1, %tmp2
	%tmp4 = sext <4 x i1> %tmp3 to <4 x i16>
	ret <4 x i16> %tmp4
}

define <4 x i32> @vcleQu32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vcleQu32:
;CHECK: vcge.u32
	%tmp1 = load <4 x i32>* %A
	%tmp2 = load <4 x i32>* %B
	%tmp3 = icmp ule <4 x i32> %tmp1, %tmp2
	%tmp4 = sext <4 x i1> %tmp3 to <4 x i32>
	ret <4 x i32> %tmp4
}