blob: a6311370003847e70e3aa427f8308670caf1e9a6 [file] [log] [blame]
; Test that vectors are scalarized/lowered correctly.
; RUN: llvm-as < %s | llc -march=ppc32 -mcpu=g3 | \
; RUN: grep stfs | count 4
; RUN: llvm-as < %s | llc -march=ppc32 -mcpu=g5 -o %t -f
; RUN: grep vspltw %t | count 2
; RUN: grep vsplti %t | count 3
; RUN: grep vsplth %t | count 1

Tanya Lattnercfab3da2008-02-19 08:07:33 +00009 %f4 = type <4 x float>
10 %i4 = type <4 x i32>
Chris Lattner6bf97912006-03-22 19:12:46 +000011
Chris Lattner629b5262008-10-26 18:53:07 +000012define void @splat(%f4* %P, %f4* %Q, float %X) nounwind {
Tanya Lattnercfab3da2008-02-19 08:07:33 +000013 %tmp = insertelement %f4 undef, float %X, i32 0 ; <%f4> [#uses=1]
14 %tmp2 = insertelement %f4 %tmp, float %X, i32 1 ; <%f4> [#uses=1]
15 %tmp4 = insertelement %f4 %tmp2, float %X, i32 2 ; <%f4> [#uses=1]
16 %tmp6 = insertelement %f4 %tmp4, float %X, i32 3 ; <%f4> [#uses=1]
17 %q = load %f4* %Q ; <%f4> [#uses=1]
18 %R = add %f4 %q, %tmp6 ; <%f4> [#uses=1]
Chris Lattner6bf97912006-03-22 19:12:46 +000019 store %f4 %R, %f4* %P
20 ret void
21}
22
Chris Lattner629b5262008-10-26 18:53:07 +000023define void @splat_i4(%i4* %P, %i4* %Q, i32 %X) nounwind {
Tanya Lattnercfab3da2008-02-19 08:07:33 +000024 %tmp = insertelement %i4 undef, i32 %X, i32 0 ; <%i4> [#uses=1]
25 %tmp2 = insertelement %i4 %tmp, i32 %X, i32 1 ; <%i4> [#uses=1]
26 %tmp4 = insertelement %i4 %tmp2, i32 %X, i32 2 ; <%i4> [#uses=1]
27 %tmp6 = insertelement %i4 %tmp4, i32 %X, i32 3 ; <%i4> [#uses=1]
28 %q = load %i4* %Q ; <%i4> [#uses=1]
29 %R = add %i4 %q, %tmp6 ; <%i4> [#uses=1]
Chris Lattner6bf97912006-03-22 19:12:46 +000030 store %i4 %R, %i4* %P
31 ret void
32}
33
Chris Lattner629b5262008-10-26 18:53:07 +000034define void @splat_imm_i32(%i4* %P, %i4* %Q, i32 %X) nounwind {
Tanya Lattnercfab3da2008-02-19 08:07:33 +000035 %q = load %i4* %Q ; <%i4> [#uses=1]
36 %R = add %i4 %q, < i32 -1, i32 -1, i32 -1, i32 -1 > ; <%i4> [#uses=1]
Chris Lattnerb45854f2006-03-25 06:11:56 +000037 store %i4 %R, %i4* %P
38 ret void
39}
40
Chris Lattner629b5262008-10-26 18:53:07 +000041define void @splat_imm_i16(%i4* %P, %i4* %Q, i32 %X) nounwind {
Tanya Lattnercfab3da2008-02-19 08:07:33 +000042 %q = load %i4* %Q ; <%i4> [#uses=1]
43 %R = add %i4 %q, < i32 65537, i32 65537, i32 65537, i32 65537 > ; <%i4> [#uses=1]
Chris Lattnerb45854f2006-03-25 06:11:56 +000044 store %i4 %R, %i4* %P
45 ret void
46}
47
Chris Lattner629b5262008-10-26 18:53:07 +000048define void @splat_h(i16 %tmp, <16 x i8>* %dst) nounwind {
Tanya Lattnercfab3da2008-02-19 08:07:33 +000049 %tmp.upgrd.1 = insertelement <8 x i16> undef, i16 %tmp, i32 0
50 %tmp72 = insertelement <8 x i16> %tmp.upgrd.1, i16 %tmp, i32 1
51 %tmp73 = insertelement <8 x i16> %tmp72, i16 %tmp, i32 2
52 %tmp74 = insertelement <8 x i16> %tmp73, i16 %tmp, i32 3
53 %tmp75 = insertelement <8 x i16> %tmp74, i16 %tmp, i32 4
54 %tmp76 = insertelement <8 x i16> %tmp75, i16 %tmp, i32 5
55 %tmp77 = insertelement <8 x i16> %tmp76, i16 %tmp, i32 6
56 %tmp78 = insertelement <8 x i16> %tmp77, i16 %tmp, i32 7
57 %tmp78.upgrd.2 = bitcast <8 x i16> %tmp78 to <16 x i8>
58 store <16 x i8> %tmp78.upgrd.2, <16 x i8>* %dst
59 ret void
Chris Lattner2ffc00a2006-04-04 17:20:45 +000060}
61
Chris Lattner629b5262008-10-26 18:53:07 +000062define void @spltish(<16 x i8>* %A, <16 x i8>* %B) nounwind {
Tanya Lattnercfab3da2008-02-19 08:07:33 +000063 %tmp = load <16 x i8>* %B ; <<16 x i8>> [#uses=1]
64 %tmp.s = bitcast <16 x i8> %tmp to <16 x i8> ; <<16 x i8>> [#uses=1]
65 %tmp4 = sub <16 x i8> %tmp.s, bitcast (<8 x i16> < i16 15, i16 15, i16 15, i16 15, i16 15, i16
66 15, i16 15, i16 15 > to <16 x i8>) ; <<16 x i8>> [#uses=1]
67 %tmp4.u = bitcast <16 x i8> %tmp4 to <16 x i8> ; <<16 x i8>> [#uses=1]
68 store <16 x i8> %tmp4.u, <16 x i8>* %A
Chris Lattnera7cdc882006-04-08 07:13:46 +000069 ret void
70}
71