Dan Gohman | fce288f | 2009-09-09 00:09:15 +0000 | [diff] [blame] | 1 | ; RUN: llc < %s -march=arm -mattr=+neon | FileCheck %s |
Bob Wilson | 5bafff3 | 2009-06-22 23:27:02 +0000 | [diff] [blame] | 2 | |
define <8 x i8> @vcges8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK: vcges8:
;CHECK: vcge.s8
; Signed >= compare of 64-bit i8 vectors; icmp+sext should select vcge.s8.
	%lhs = load <8 x i8>* %A
	%rhs = load <8 x i8>* %B
	%cmp = icmp sge <8 x i8> %lhs, %rhs
	%mask = sext <8 x i1> %cmp to <8 x i8>
	ret <8 x i8> %mask
}
| 12 | |
define <4 x i16> @vcges16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vcges16:
;CHECK: vcge.s16
; Signed >= compare of 64-bit i16 vectors; icmp+sext should select vcge.s16.
	%lhs = load <4 x i16>* %A
	%rhs = load <4 x i16>* %B
	%cmp = icmp sge <4 x i16> %lhs, %rhs
	%mask = sext <4 x i1> %cmp to <4 x i16>
	ret <4 x i16> %mask
}
| 22 | |
define <2 x i32> @vcges32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK: vcges32:
;CHECK: vcge.s32
; Signed >= compare of 64-bit i32 vectors; icmp+sext should select vcge.s32.
	%lhs = load <2 x i32>* %A
	%rhs = load <2 x i32>* %B
	%cmp = icmp sge <2 x i32> %lhs, %rhs
	%mask = sext <2 x i1> %cmp to <2 x i32>
	ret <2 x i32> %mask
}
| 32 | |
define <8 x i8> @vcgeu8(<8 x i8>* %A, <8 x i8>* %B) nounwind {
;CHECK: vcgeu8:
;CHECK: vcge.u8
; Unsigned >= compare of 64-bit i8 vectors; icmp+sext should select vcge.u8.
	%lhs = load <8 x i8>* %A
	%rhs = load <8 x i8>* %B
	%cmp = icmp uge <8 x i8> %lhs, %rhs
	%mask = sext <8 x i1> %cmp to <8 x i8>
	ret <8 x i8> %mask
}
| 42 | |
define <4 x i16> @vcgeu16(<4 x i16>* %A, <4 x i16>* %B) nounwind {
;CHECK: vcgeu16:
;CHECK: vcge.u16
; Unsigned >= compare of 64-bit i16 vectors; icmp+sext should select vcge.u16.
	%lhs = load <4 x i16>* %A
	%rhs = load <4 x i16>* %B
	%cmp = icmp uge <4 x i16> %lhs, %rhs
	%mask = sext <4 x i1> %cmp to <4 x i16>
	ret <4 x i16> %mask
}
| 52 | |
define <2 x i32> @vcgeu32(<2 x i32>* %A, <2 x i32>* %B) nounwind {
;CHECK: vcgeu32:
;CHECK: vcge.u32
; Unsigned >= compare of 64-bit i32 vectors; icmp+sext should select vcge.u32.
	%lhs = load <2 x i32>* %A
	%rhs = load <2 x i32>* %B
	%cmp = icmp uge <2 x i32> %lhs, %rhs
	%mask = sext <2 x i1> %cmp to <2 x i32>
	ret <2 x i32> %mask
}
| 62 | |
define <2 x i32> @vcgef32(<2 x float>* %A, <2 x float>* %B) nounwind {
;CHECK: vcgef32:
;CHECK: vcge.f32
; Ordered FP >= compare of 64-bit float vectors; fcmp+sext should select vcge.f32.
	%lhs = load <2 x float>* %A
	%rhs = load <2 x float>* %B
	%cmp = fcmp oge <2 x float> %lhs, %rhs
	%mask = sext <2 x i1> %cmp to <2 x i32>
	ret <2 x i32> %mask
}
| 72 | |
define <16 x i8> @vcgeQs8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vcgeQs8:
;CHECK: vcge.s8
; 128-bit (Q-register) variant: signed >= on i8 vectors selects vcge.s8.
	%lhs = load <16 x i8>* %A
	%rhs = load <16 x i8>* %B
	%cmp = icmp sge <16 x i8> %lhs, %rhs
	%mask = sext <16 x i1> %cmp to <16 x i8>
	ret <16 x i8> %mask
}
| 82 | |
define <8 x i16> @vcgeQs16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK: vcgeQs16:
;CHECK: vcge.s16
; 128-bit (Q-register) variant: signed >= on i16 vectors selects vcge.s16.
	%lhs = load <8 x i16>* %A
	%rhs = load <8 x i16>* %B
	%cmp = icmp sge <8 x i16> %lhs, %rhs
	%mask = sext <8 x i1> %cmp to <8 x i16>
	ret <8 x i16> %mask
}
| 92 | |
define <4 x i32> @vcgeQs32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vcgeQs32:
;CHECK: vcge.s32
; 128-bit (Q-register) variant: signed >= on i32 vectors selects vcge.s32.
	%lhs = load <4 x i32>* %A
	%rhs = load <4 x i32>* %B
	%cmp = icmp sge <4 x i32> %lhs, %rhs
	%mask = sext <4 x i1> %cmp to <4 x i32>
	ret <4 x i32> %mask
}
| 102 | |
define <16 x i8> @vcgeQu8(<16 x i8>* %A, <16 x i8>* %B) nounwind {
;CHECK: vcgeQu8:
;CHECK: vcge.u8
; 128-bit (Q-register) variant: unsigned >= on i8 vectors selects vcge.u8.
	%lhs = load <16 x i8>* %A
	%rhs = load <16 x i8>* %B
	%cmp = icmp uge <16 x i8> %lhs, %rhs
	%mask = sext <16 x i1> %cmp to <16 x i8>
	ret <16 x i8> %mask
}
| 112 | |
define <8 x i16> @vcgeQu16(<8 x i16>* %A, <8 x i16>* %B) nounwind {
;CHECK: vcgeQu16:
;CHECK: vcge.u16
; 128-bit (Q-register) variant: unsigned >= on i16 vectors selects vcge.u16.
	%lhs = load <8 x i16>* %A
	%rhs = load <8 x i16>* %B
	%cmp = icmp uge <8 x i16> %lhs, %rhs
	%mask = sext <8 x i1> %cmp to <8 x i16>
	ret <8 x i16> %mask
}
| 122 | |
define <4 x i32> @vcgeQu32(<4 x i32>* %A, <4 x i32>* %B) nounwind {
;CHECK: vcgeQu32:
;CHECK: vcge.u32
; 128-bit (Q-register) variant: unsigned >= on i32 vectors selects vcge.u32.
	%lhs = load <4 x i32>* %A
	%rhs = load <4 x i32>* %B
	%cmp = icmp uge <4 x i32> %lhs, %rhs
	%mask = sext <4 x i1> %cmp to <4 x i32>
	ret <4 x i32> %mask
}
| 132 | |
define <4 x i32> @vcgeQf32(<4 x float>* %A, <4 x float>* %B) nounwind {
;CHECK: vcgeQf32:
;CHECK: vcge.f32
; 128-bit (Q-register) variant: ordered FP >= selects vcge.f32.
	%lhs = load <4 x float>* %A
	%rhs = load <4 x float>* %B
	%cmp = fcmp oge <4 x float> %lhs, %rhs
	%mask = sext <4 x i1> %cmp to <4 x i32>
	ret <4 x i32> %mask
}
Bob Wilson | 83815ae | 2009-10-09 20:20:54 +0000 | [diff] [blame] | 142 | |
define <2 x i32> @vacgef32(<2 x float>* %A, <2 x float>* %B) nounwind {
;CHECK: vacgef32:
;CHECK: vacge.f32
; Absolute-compare intrinsic (64-bit) should map directly to vacge.f32.
	%lhs = load <2 x float>* %A
	%rhs = load <2 x float>* %B
	%res = call <2 x i32> @llvm.arm.neon.vacged(<2 x float> %lhs, <2 x float> %rhs)
	ret <2 x i32> %res
}
| 151 | |
define <4 x i32> @vacgeQf32(<4 x float>* %A, <4 x float>* %B) nounwind {
;CHECK: vacgeQf32:
;CHECK: vacge.f32
; Absolute-compare intrinsic (128-bit) should map directly to vacge.f32.
	%lhs = load <4 x float>* %A
	%rhs = load <4 x float>* %B
	%res = call <4 x i32> @llvm.arm.neon.vacgeq(<4 x float> %lhs, <4 x float> %rhs)
	ret <4 x i32> %res
}
| 160 | |
| 161 | declare <2 x i32> @llvm.arm.neon.vacged(<2 x float>, <2 x float>) nounwind readnone |
| 162 | declare <4 x i32> @llvm.arm.neon.vacgeq(<4 x float>, <4 x float>) nounwind readnone |
Owen Anderson | c24cb35 | 2010-11-08 23:21:22 +0000 | [diff] [blame^] | 163 | |
define <8 x i8> @vcgei8Z(<8 x i8>* %A) nounwind {
;CHECK: vcgei8Z:
;CHECK-NOT: vmov
;CHECK-NOT: vmvn
;CHECK: vcge.s8
; Compare against zero must use the #0 form of vcge.s8 — no constant
; materialization (vmov/vmvn) may appear.
	%val = load <8 x i8>* %A
	%cmp = icmp sge <8 x i8> %val, <i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>
	%mask = sext <8 x i1> %cmp to <8 x i8>
	ret <8 x i8> %mask
}
| 174 | |
define <8 x i8> @vclei8Z(<8 x i8>* %A) nounwind {
;CHECK: vclei8Z:
;CHECK-NOT: vmov
;CHECK-NOT: vmvn
;CHECK: vcle.s8
; Compare-less-equal against zero must use the #0 form of vcle.s8 — no
; constant materialization (vmov/vmvn) may appear.
	%val = load <8 x i8>* %A
	%cmp = icmp sle <8 x i8> %val, <i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0, i8 0>
	%mask = sext <8 x i1> %cmp to <8 x i8>
	ret <8 x i8> %mask
}