; RUN: llc < %s -mtriple=powerpc-unknown-linux-gnu -march=ppc32 -mattr=+altivec | FileCheck %s

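; There is no single Altivec instruction for a v4i32 multiply, so the mul
; should be matched to a vmsumuhm-based sequence and must not be scalarized
; into mullw.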
define <4 x i32> @test_v4i32(<4 x i32>* %X, <4 x i32>* %Y) {
  %tmp = load <4 x i32>* %X             ; <<4 x i32>> [#uses=1]
  %tmp2 = load <4 x i32>* %Y            ; <<4 x i32>> [#uses=1]
  %tmp3 = mul <4 x i32> %tmp, %tmp2     ; <<4 x i32>> [#uses=1]
  ret <4 x i32> %tmp3
}
; CHECK: test_v4i32:
; CHECK: vmsumuhm
; CHECK-NOT: mullw

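; A v8i16 multiply should be matched to vmladduhm (multiply-low and add
; modulo), not scalarized into mullw.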
define <8 x i16> @test_v8i16(<8 x i16>* %X, <8 x i16>* %Y) {
  %tmp = load <8 x i16>* %X             ; <<8 x i16>> [#uses=1]
  %tmp2 = load <8 x i16>* %Y            ; <<8 x i16>> [#uses=1]
  %tmp3 = mul <8 x i16> %tmp, %tmp2     ; <<8 x i16>> [#uses=1]
  ret <8 x i16> %tmp3
}
; CHECK: test_v8i16:
; CHECK: vmladduhm
; CHECK-NOT: mullw

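; A v16i8 multiply should be built from the even/odd byte multiplies vmuleub
; and vmuloub, not scalarized into mullw.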
define <16 x i8> @test_v16i8(<16 x i8>* %X, <16 x i8>* %Y) {
  %tmp = load <16 x i8>* %X             ; <<16 x i8>> [#uses=1]
  %tmp2 = load <16 x i8>* %Y            ; <<16 x i8>> [#uses=1]
  %tmp3 = mul <16 x i8> %tmp, %tmp2     ; <<16 x i8>> [#uses=1]
  ret <16 x i8> %tmp3
}
; CHECK: test_v16i8:
; CHECK: vmuloub
; CHECK: vmuleub
; CHECK-NOT: mullw

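; Altivec has no plain v4f32 multiply, only the fused multiply-add vmaddfp,
; so the fmul should be selected as vmaddfp with a -0.0 addend (adding +0.0
; instead would turn a -0.0 product into +0.0).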
define <4 x float> @test_float(<4 x float>* %X, <4 x float>* %Y) {
  %tmp = load <4 x float>* %X
  %tmp2 = load <4 x float>* %Y
  %tmp3 = fmul <4 x float> %tmp, %tmp2
  ret <4 x float> %tmp3
}
; Check the creation of a negative-zero float vector by building a vector with
; all bits set and shifting each word left by 31 bits, which yields a vector of
; 4 x 0x80000000 (-0.0 as float).
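; Roughly, the arithmetic is: vspltisw with the immediate -1 splats 0xFFFFFFFF
; into every word, and vslw shifts each word left by the low 5 bits of the
; corresponding element of the shift operand (-1 & 0x1F = 31), so each lane
; becomes 0xFFFFFFFF << 31 = 0x80000000, the IEEE-754 bit pattern of -0.0f.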
; CHECK: test_float:
; CHECK: vspltisw [[ZNEG:[0-9]+]], -1
; CHECK: vslw {{[0-9]+}}, [[ZNEG]], [[ZNEG]]
; CHECK: vmaddfp