blob: 0ec630dd990e17ddd0556c33030c8c7279c54c35 [file] [log] [blame]
; All of these ands and shifts should be folded into rlwimi's
; RUN: llvm-as < %s | llc -march=ppc32 | grep rlwimi | wc -l | grep 3 &&
; RUN: llvm-as < %s | llc -march=ppc32 | grep srwi | wc -l | grep 1 &&
; RUN: llvm-as < %s | llc -march=ppc32 | not grep slwi

implementation   ; Functions:

; Blend two packed pixel values by %alpha (weight out of 32).
; The masks suggest an RGB555-style layout: 31775 = 0x7C1F (red|blue bits)
; and 992 = 0x3E0 (green bits); green is first moved up by 15 into
; 0x1F00000 = 32505856 so the per-channel multiplies cannot carry into a
; neighboring channel.  NOTE(review): channel naming is inferred from the
; masks — confirm against the original caller.
; The test expects llc to fold the shift+and+or chains into 3 rlwimi's,
; emit exactly 1 srwi, and no slwi (see RUN lines above).
ushort %test1(uint %srcA, uint %srcB, uint %alpha) {
entry:
        ; Split srcA into (green << 15) | (red|blue) so channels are disjoint.
        %tmp.1 = shl uint %srcA, ubyte 15               ; <uint> [#uses=1]
        %tmp.4 = and uint %tmp.1, 32505856              ; <uint> [#uses=1]
        %tmp.6 = and uint %srcA, 31775                  ; <uint> [#uses=1]
        %tmp.7 = or uint %tmp.4, %tmp.6                 ; <uint> [#uses=1]
        ; Same split for srcB.
        %tmp.9 = shl uint %srcB, ubyte 15               ; <uint> [#uses=1]
        %tmp.12 = and uint %tmp.9, 32505856             ; <uint> [#uses=1]
        %tmp.14 = and uint %srcB, 31775                 ; <uint> [#uses=1]
        %tmp.15 = or uint %tmp.12, %tmp.14              ; <uint> [#uses=1]
        ; Weighted sum: srcA*alpha + srcB*(32-alpha).
        %tmp.18 = mul uint %tmp.7, %alpha               ; <uint> [#uses=1]
        %tmp.20 = sub uint 32, %alpha                   ; <uint> [#uses=1]
        %tmp.22 = mul uint %tmp.15, %tmp.20             ; <uint> [#uses=1]
        %tmp.23 = add uint %tmp.22, %tmp.18             ; <uint> [#uses=2]
        ; Renormalize (>> 5 for the /32) and reassemble the 16-bit pixel:
        ; red|blue from bit 5, green from bit 20 (15 + 5).
        %tmp.27 = shr uint %tmp.23, ubyte 5             ; <uint> [#uses=1]
        %tmp.28 = cast uint %tmp.27 to ushort           ; <ushort> [#uses=1]
        %tmp.29 = and ushort %tmp.28, 31775             ; <ushort> [#uses=1]
        %tmp.33 = shr uint %tmp.23, ubyte 20            ; <uint> [#uses=1]
        %tmp.34 = cast uint %tmp.33 to ushort           ; <ushort> [#uses=1]
        %tmp.35 = and ushort %tmp.34, 992               ; <ushort> [#uses=1]
        %tmp.36 = or ushort %tmp.29, %tmp.35            ; <ushort> [#uses=1]
        ret ushort %tmp.36
}