//===- PPCInstrAltivec.td - The PowerPC Altivec Extension --*- tablegen -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by Chris Lattner and is distributed under
// the University of Illinois Open Source License.  See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file describes the Altivec extension to the PowerPC instruction set.
//
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// Altivec transformation functions and pattern fragments.
//

// VSPLT_get_imm xform function: convert vector_shuffle mask to VSPLT* imm.
def VSPLT_get_imm : SDNodeXForm<build_vector, [{
  return getI32Imm(PPC::getVSPLTImmediate(N));
}]>;

def VSPLT_shuffle_mask : PatLeaf<(build_vector), [{
  return PPC::isSplatShuffleMask(N);
}], VSPLT_get_imm>;
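
// A splat shuffle mask like <1,1,1,1> is matched by VSPLT_shuffle_mask, and
// VSPLT_get_imm recovers the splatted element index from it, so the VSPLTW
// patterns later in this file can select a single instruction: e.g.
// "vspltw $vD, $vB, 1" replicates word 1 of $vB into all four words of $vD.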

// VSPLTISB_get_imm xform function: convert build_vector to VSPLTISB imm.
def VSPLTISB_get_imm : SDNodeXForm<build_vector, [{
  char Val;
  PPC::isVecSplatImm(N, 1, &Val);
  return getI32Imm(Val);
}]>;
def vecspltisb : PatLeaf<(build_vector), [{
  return PPC::isVecSplatImm(N, 1);
}], VSPLTISB_get_imm>;

// VSPLTISH_get_imm xform function: convert build_vector to VSPLTISH imm.
def VSPLTISH_get_imm : SDNodeXForm<build_vector, [{
  char Val;
  PPC::isVecSplatImm(N, 2, &Val);
  return getI32Imm(Val);
}]>;
def vecspltish : PatLeaf<(build_vector), [{
  return PPC::isVecSplatImm(N, 2);
}], VSPLTISH_get_imm>;

// VSPLTISW_get_imm xform function: convert build_vector to VSPLTISW imm.
def VSPLTISW_get_imm : SDNodeXForm<build_vector, [{
  char Val;
  PPC::isVecSplatImm(N, 4, &Val);
  return getI32Imm(Val);
}]>;
def vecspltisw : PatLeaf<(build_vector), [{
  return PPC::isVecSplatImm(N, 4);
}], VSPLTISW_get_imm>;
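
// Note that vspltisb/vspltish/vspltisw sign-extend a 5-bit immediate, so these
// leaves can only match build_vectors that splat a constant in the range
// [-16, 15]; e.g. (build_vector 12, 12, 12, 12) selects "vspltisw $vD, 12".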

class isVDOT {   // vector dot instruction.
  list<Register> Defs = [CR6];
  bit RC = 1;
}
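
// The "dot" forms of the vector compares below (vcmp*.) mix in this class:
// in addition to the result vector they set CR6, which records whether the
// predicate held for all elements or for none, so it can be branched on.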

//===----------------------------------------------------------------------===//
// Instruction Definitions.

def IMPLICIT_DEF_VRRC : Pseudo<(ops VRRC:$rD), "; $rD = IMPLICIT_DEF_VRRC",
                               [(set VRRC:$rD, (v4f32 (undef)))]>;

let isLoad = 1, PPC970_Unit = 2 in {  // Loads.
def LVEBX: XForm_1<31, 7, (ops VRRC:$vD, memrr:$src),
                   "lvebx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvebx xoaddr:$src))]>;
def LVEHX: XForm_1<31, 39, (ops VRRC:$vD, memrr:$src),
                   "lvehx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvehx xoaddr:$src))]>;
def LVEWX: XForm_1<31, 71, (ops VRRC:$vD, memrr:$src),
                   "lvewx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvewx xoaddr:$src))]>;
def LVX  : XForm_1<31, 103, (ops VRRC:$vD, memrr:$src),
                   "lvx $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvx xoaddr:$src))]>;
def LVXL : XForm_1<31, 359, (ops VRRC:$vD, memrr:$src),
                   "lvxl $vD, $src", LdStGeneral,
                   [(set VRRC:$vD, (int_ppc_altivec_lvxl xoaddr:$src))]>;
}

def LVSL : XForm_1<31, 6, (ops VRRC:$vD, GPRC:$base, GPRC:$rA),
                   "lvsl $vD, $base, $rA", LdStGeneral,
                   []>, PPC970_Unit_LSU;
def LVSR : XForm_1<31, 38, (ops VRRC:$vD, GPRC:$base, GPRC:$rA),
                   "lvsr $vD, $base, $rA", LdStGeneral,
                   []>, PPC970_Unit_LSU;
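
// lvsl/lvsr have no selection pattern; they produce the permute control
// vector used with vperm for misaligned accesses.  A typical misaligned load
// sequence (registers here purely illustrative) is roughly:
//   lvsl  v3, 0, r4       ; permute control from the low bits of the address
//   lvx   v1, 0, r4       ; aligned quadword containing the first byte
//   lvx   v2, r5, r4      ; following quadword (r5 = 15)
//   vperm v4, v1, v2, v3  ; extract the 16 requested bytes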

let isStore = 1, noResults = 1, PPC970_Unit = 2 in {  // Stores.
def STVEBX: XForm_8<31, 135, (ops VRRC:$rS, memrr:$dst),
                    "stvebx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvebx VRRC:$rS, xoaddr:$dst)]>;
def STVEHX: XForm_8<31, 167, (ops VRRC:$rS, memrr:$dst),
                    "stvehx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvehx VRRC:$rS, xoaddr:$dst)]>;
def STVEWX: XForm_8<31, 199, (ops VRRC:$rS, memrr:$dst),
                    "stvewx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvewx VRRC:$rS, xoaddr:$dst)]>;
def STVX  : XForm_8<31, 231, (ops VRRC:$rS, memrr:$dst),
                    "stvx $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvx VRRC:$rS, xoaddr:$dst)]>;
def STVXL : XForm_8<31, 487, (ops VRRC:$rS, memrr:$dst),
                    "stvxl $rS, $dst", LdStGeneral,
                    [(int_ppc_altivec_stvxl VRRC:$rS, xoaddr:$dst)]>;
}

let PPC970_Unit = 5 in {  // VALU Operations.
// VA-Form instructions.  3-input AltiVec ops.
def VMADDFP : VAForm_1<46, (ops VRRC:$vD, VRRC:$vA, VRRC:$vC, VRRC:$vB),
                       "vmaddfp $vD, $vA, $vC, $vB", VecFP,
                       [(set VRRC:$vD, (fadd (fmul VRRC:$vA, VRRC:$vC),
                                             VRRC:$vB))]>,
                       Requires<[FPContractions]>;
def VNMSUBFP: VAForm_1<47, (ops VRRC:$vD, VRRC:$vA, VRRC:$vC, VRRC:$vB),
                       "vnmsubfp $vD, $vA, $vC, $vB", VecFP,
                       [(set VRRC:$vD, (fneg (fsub (fmul VRRC:$vA, VRRC:$vC),
                                                   VRRC:$vB)))]>,
                       Requires<[FPContractions]>;
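// Note that vmaddfp computes $vA * $vC + $vB and vnmsubfp computes
// -($vA * $vC - $vB), each with a single rounding, so the fadd(fmul) and
// fneg(fsub(fmul)) patterns above may only be contracted into them when the
// FPContractions predicate permits it.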
def VMHADDSHS  : VAForm_1a<32, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB, VRRC:$vC),
                 "vmhaddshs $vD, $vA, $vB, $vC", VecFP,
                 [(set VRRC:$vD,
                  (int_ppc_altivec_vmhaddshs VRRC:$vA, VRRC:$vB, VRRC:$vC))]>;
def VMHRADDSHS : VAForm_1a<33, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB, VRRC:$vC),
                 "vmhraddshs $vD, $vA, $vB, $vC", VecFP,
                 [(set VRRC:$vD,
                  (int_ppc_altivec_vmhraddshs VRRC:$vA, VRRC:$vB, VRRC:$vC))]>;
def VPERM      : VAForm_1a<43, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB, VRRC:$vC),
                 "vperm $vD, $vA, $vB, $vC", VecPerm,
                 [(set VRRC:$vD,
                  (PPCvperm (v4f32 VRRC:$vA), VRRC:$vB, VRRC:$vC))]>;
def VSLDOI     : VAForm_2<44, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB, u5imm:$SH),
                 "vsldoi $vD, $vA, $vB, $SH", VecFP,
                 [(set VRRC:$vD,
                  (int_ppc_altivec_vsldoi VRRC:$vA, VRRC:$vB, imm:$SH))]>;
def VSEL       : VAForm_1a<42, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB, VRRC:$vC),
                 "vsel $vD, $vA, $vB, $vC", VecFP,
                 [(set VRRC:$vD,
                  (int_ppc_altivec_vsel VRRC:$vA, VRRC:$vB, VRRC:$vC))]>;

// VX-Form instructions.  AltiVec arithmetic ops.
def VADDCUW : VXForm_1<384, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddcuw $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vaddcuw VRRC:$vA, VRRC:$vB))]>;
def VADDFP  : VXForm_1<10, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddfp $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD, (fadd VRRC:$vA, VRRC:$vB))]>;

def VADDUBM : VXForm_1<0, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddubm $vD, $vA, $vB", VecGeneral,
                       [(set VRRC:$vD, (add (v16i8 VRRC:$vA), VRRC:$vB))]>;
def VADDUHM : VXForm_1<64, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vadduhm $vD, $vA, $vB", VecGeneral,
                       [(set VRRC:$vD, (add (v8i16 VRRC:$vA), VRRC:$vB))]>;
def VADDUWM : VXForm_1<128, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vadduwm $vD, $vA, $vB", VecGeneral,
                       [(set VRRC:$vD, (add (v4i32 VRRC:$vA), VRRC:$vB))]>;

def VADDSBS : VXForm_1<768, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddsbs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vaddsbs VRRC:$vA, VRRC:$vB))]>;
def VADDSHS : VXForm_1<832, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddshs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vaddshs VRRC:$vA, VRRC:$vB))]>;
def VADDSWS : VXForm_1<896, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddsws $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vaddsws VRRC:$vA, VRRC:$vB))]>;

def VADDUBS : VXForm_1<512, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vaddubs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vaddubs VRRC:$vA, VRRC:$vB))]>;
def VADDUHS : VXForm_1<576, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vadduhs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vadduhs VRRC:$vA, VRRC:$vB))]>;
def VADDUWS : VXForm_1<640, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vadduws $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vadduws VRRC:$vA, VRRC:$vB))]>;
def VAND  : VXForm_1<1028, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vand $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (and (v4i32 VRRC:$vA), VRRC:$vB))]>;
def VANDC : VXForm_1<1092, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vandc $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (and (v4i32 VRRC:$vA), (vnot VRRC:$vB)))]>;

def VCFSX  : VXForm_1<842, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vcfsx $vD, $vB, $UIMM", VecFP,
                      [(set VRRC:$vD,
                        (int_ppc_altivec_vcfsx VRRC:$vB, imm:$UIMM))]>;
def VCFUX  : VXForm_1<778, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vcfux $vD, $vB, $UIMM", VecFP,
                      [(set VRRC:$vD,
                        (int_ppc_altivec_vcfux VRRC:$vB, imm:$UIMM))]>;
def VCTSXS : VXForm_1<970, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vctsxs $vD, $vB, $UIMM", VecFP,
                      []>;
def VCTUXS : VXForm_1<906, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vctuxs $vD, $vB, $UIMM", VecFP,
                      []>;
def VEXPTEFP : VXForm_2<394, (ops VRRC:$vD, VRRC:$vB),
                        "vexptefp $vD, $vB", VecFP,
                        [(set VRRC:$vD, (int_ppc_altivec_vexptefp VRRC:$vB))]>;
def VLOGEFP  : VXForm_2<458, (ops VRRC:$vD, VRRC:$vB),
                        "vlogefp $vD, $vB", VecFP,
                        [(set VRRC:$vD, (int_ppc_altivec_vlogefp VRRC:$vB))]>;
def VMAXFP : VXForm_1<1034, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                      "vmaxfp $vD, $vA, $vB", VecFP,
                      []>;
def VMINFP : VXForm_1<1098, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                      "vminfp $vD, $vA, $vB", VecFP,
                      []>;
def VMRGHH : VXForm_1<76, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                      "vmrghh $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD,
                        (int_ppc_altivec_vmrghh VRRC:$vA, VRRC:$vB))]>;
def VMRGHW : VXForm_1<140, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                      "vmrghw $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD,
                        (int_ppc_altivec_vmrghw VRRC:$vA, VRRC:$vB))]>;
def VMRGLH : VXForm_1<332, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                      "vmrglh $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD,
                        (int_ppc_altivec_vmrglh VRRC:$vA, VRRC:$vB))]>;
def VMRGLW : VXForm_1<396, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                      "vmrglw $vD, $vA, $vB", VecFP,
                      [(set VRRC:$vD,
                        (int_ppc_altivec_vmrglw VRRC:$vA, VRRC:$vB))]>;
def VREFP     : VXForm_2<266, (ops VRRC:$vD, VRRC:$vB),
                         "vrefp $vD, $vB", VecFP,
                         [(set VRRC:$vD, (int_ppc_altivec_vrefp VRRC:$vB))]>;
def VRFIM     : VXForm_2<714, (ops VRRC:$vD, VRRC:$vB),
                         "vrfim $vD, $vB", VecFP,
                         [(set VRRC:$vD, (int_ppc_altivec_vrfim VRRC:$vB))]>;
def VRFIN     : VXForm_2<522, (ops VRRC:$vD, VRRC:$vB),
                         "vrfin $vD, $vB", VecFP,
                         [(set VRRC:$vD, (int_ppc_altivec_vrfin VRRC:$vB))]>;
def VRFIP     : VXForm_2<650, (ops VRRC:$vD, VRRC:$vB),
                         "vrfip $vD, $vB", VecFP,
                         [(set VRRC:$vD, (int_ppc_altivec_vrfip VRRC:$vB))]>;
def VRFIZ     : VXForm_2<586, (ops VRRC:$vD, VRRC:$vB),
                         "vrfiz $vD, $vB", VecFP,
                         [(set VRRC:$vD, (int_ppc_altivec_vrfiz VRRC:$vB))]>;
def VRSQRTEFP : VXForm_2<330, (ops VRRC:$vD, VRRC:$vB),
                         "vrsqrtefp $vD, $vB", VecFP,
                         [(set VRRC:$vD, (int_ppc_altivec_vrsqrtefp VRRC:$vB))]>;
// Note: vsubcuw is VX opcode 1408; the duplicated 74 here collided with
// vsubfp below.
def VSUBCUW : VXForm_1<1408, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubcuw $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsubcuw VRRC:$vA, VRRC:$vB))]>;
def VSUBFP  : VXForm_1<74, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubfp $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD, (fsub VRRC:$vA, VRRC:$vB))]>;

def VSUBUBM : VXForm_1<1024, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsububm $vD, $vA, $vB", VecGeneral,
                       [(set VRRC:$vD, (sub (v16i8 VRRC:$vA), VRRC:$vB))]>;
def VSUBUHM : VXForm_1<1088, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubuhm $vD, $vA, $vB", VecGeneral,
                       [(set VRRC:$vD, (sub (v8i16 VRRC:$vA), VRRC:$vB))]>;
def VSUBUWM : VXForm_1<1152, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubuwm $vD, $vA, $vB", VecGeneral,
                       [(set VRRC:$vD, (sub (v4i32 VRRC:$vA), VRRC:$vB))]>;

def VSUBSBS : VXForm_1<1792, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubsbs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsubsbs VRRC:$vA, VRRC:$vB))]>;
def VSUBSHS : VXForm_1<1856, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubshs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsubshs VRRC:$vA, VRRC:$vB))]>;
def VSUBSWS : VXForm_1<1920, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubsws $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsubsws VRRC:$vA, VRRC:$vB))]>;

def VSUBUBS : VXForm_1<1536, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsububs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsububs VRRC:$vA, VRRC:$vB))]>;
def VSUBUHS : VXForm_1<1600, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubuhs $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsubuhs VRRC:$vA, VRRC:$vB))]>;
def VSUBUWS : VXForm_1<1664, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                       "vsubuws $vD, $vA, $vB", VecFP,
                       [(set VRRC:$vD,
                         (int_ppc_altivec_vsubuws VRRC:$vA, VRRC:$vB))]>;

def VSUMSWS  : VXForm_1<1928, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                        "vsumsws $vD, $vA, $vB", VecFP,
                        [(set VRRC:$vD,
                          (int_ppc_altivec_vsumsws VRRC:$vA, VRRC:$vB))]>;
def VSUM2SWS : VXForm_1<1672, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                        "vsum2sws $vD, $vA, $vB", VecFP,
                        [(set VRRC:$vD,
                          (int_ppc_altivec_vsum2sws VRRC:$vA, VRRC:$vB))]>;
// Note: vsum4sbs is VX opcode 1800; the duplicated 1672 here collided with
// vsum2sws above.
def VSUM4SBS : VXForm_1<1800, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                        "vsum4sbs $vD, $vA, $vB", VecFP,
                        [(set VRRC:$vD,
                          (int_ppc_altivec_vsum4sbs VRRC:$vA, VRRC:$vB))]>;
def VSUM4SHS : VXForm_1<1608, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                        "vsum4shs $vD, $vA, $vB", VecFP,
                        [(set VRRC:$vD,
                          (int_ppc_altivec_vsum4shs VRRC:$vA, VRRC:$vB))]>;
def VSUM4UBS : VXForm_1<1544, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                        "vsum4ubs $vD, $vA, $vB", VecFP,
                        [(set VRRC:$vD,
                          (int_ppc_altivec_vsum4ubs VRRC:$vA, VRRC:$vB))]>;

def VNOR : VXForm_1<1284, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vnor $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (vnot (or (v4i32 VRRC:$vA), VRRC:$vB)))]>;
def VOR  : VXForm_1<1156, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vor $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (or (v4i32 VRRC:$vA), VRRC:$vB))]>;
def VXOR : VXForm_1<1220, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vxor $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (xor (v4i32 VRRC:$vA), VRRC:$vB))]>;

def VRLB : VXForm_1<4, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vrlb $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vrlb VRRC:$vA, VRRC:$vB))]>;
def VRLH : VXForm_1<68, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vrlh $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vrlh VRRC:$vA, VRRC:$vB))]>;
def VRLW : VXForm_1<132, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vrlw $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vrlw VRRC:$vA, VRRC:$vB))]>;

def VSLO : VXForm_1<1036, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vslo $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vslo VRRC:$vA, VRRC:$vB))]>;
def VSLB : VXForm_1<260, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vslb $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vslb VRRC:$vA, VRRC:$vB))]>;
def VSLH : VXForm_1<324, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vslh $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vslh VRRC:$vA, VRRC:$vB))]>;
def VSLW : VXForm_1<388, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                    "vslw $vD, $vA, $vB", VecFP,
                    [(set VRRC:$vD, (int_ppc_altivec_vslw VRRC:$vA, VRRC:$vB))]>;

def VSPLTB : VXForm_1<524, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vspltb $vD, $vB, $UIMM", VecPerm,
                      []>;
def VSPLTH : VXForm_1<588, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vsplth $vD, $vB, $UIMM", VecPerm,
                      []>;
def VSPLTW : VXForm_1<652, (ops VRRC:$vD, u5imm:$UIMM, VRRC:$vB),
                      "vspltw $vD, $vB, $UIMM", VecPerm,
                      [(set VRRC:$vD, (vector_shuffle (v4f32 VRRC:$vB), (undef),
                                                      VSPLT_shuffle_mask:$UIMM))]>;

def VSR   : VXForm_1<708, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsr $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsr VRRC:$vA, VRRC:$vB))]>;
def VSRO  : VXForm_1<1100, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsro $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsro VRRC:$vA, VRRC:$vB))]>;
def VSRAB : VXForm_1<772, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsrab $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsrab VRRC:$vA, VRRC:$vB))]>;
def VSRAH : VXForm_1<836, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsrah $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsrah VRRC:$vA, VRRC:$vB))]>;
def VSRAW : VXForm_1<900, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsraw $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsraw VRRC:$vA, VRRC:$vB))]>;
def VSRB  : VXForm_1<516, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsrb $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsrb VRRC:$vA, VRRC:$vB))]>;
def VSRH  : VXForm_1<580, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsrh $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsrh VRRC:$vA, VRRC:$vB))]>;
def VSRW  : VXForm_1<644, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                     "vsrw $vD, $vA, $vB", VecFP,
                     [(set VRRC:$vD, (int_ppc_altivec_vsrw VRRC:$vA, VRRC:$vB))]>;

def VSPLTISB : VXForm_3<780, (ops VRRC:$vD, s5imm:$SIMM),
                        "vspltisb $vD, $SIMM", VecPerm,
                        [(set VRRC:$vD, (v4f32 vecspltisb:$SIMM))]>;
def VSPLTISH : VXForm_3<844, (ops VRRC:$vD, s5imm:$SIMM),
                        "vspltish $vD, $SIMM", VecPerm,
                        [(set VRRC:$vD, (v4f32 vecspltish:$SIMM))]>;
def VSPLTISW : VXForm_3<908, (ops VRRC:$vD, s5imm:$SIMM),
                        "vspltisw $vD, $SIMM", VecPerm,
                        [(set VRRC:$vD, (v4f32 vecspltisw:$SIMM))]>;

// Altivec Comparisons.

// f32 element comparisons.
def VCMPBFP   : VXRForm_1<966, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpbfp $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpbfp VRRC:$vA, VRRC:$vB))]>;
def VCMPBFPo  : VXRForm_1<966, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpbfp. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4f32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 966)))]>, isVDOT;
def VCMPEQFP  : VXRForm_1<198, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpeqfp $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpeqfp VRRC:$vA, VRRC:$vB))]>;
def VCMPEQFPo : VXRForm_1<198, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpeqfp. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4f32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 198)))]>, isVDOT;
def VCMPGEFP  : VXRForm_1<454, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgefp $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgefp VRRC:$vA, VRRC:$vB))]>;
def VCMPGEFPo : VXRForm_1<454, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgefp. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4f32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 454)))]>, isVDOT;
def VCMPGTFP  : VXRForm_1<710, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtfp $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtfp VRRC:$vA, VRRC:$vB))]>;
def VCMPGTFPo : VXRForm_1<710, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtfp. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4f32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 710)))]>, isVDOT;

// i8 element comparisons.
def VCMPEQUB  : VXRForm_1<6, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpequb $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpequb VRRC:$vA, VRRC:$vB))]>;
def VCMPEQUBo : VXRForm_1<6, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpequb. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v16i8
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 6)))]>, isVDOT;
def VCMPGTSB  : VXRForm_1<774, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtsb $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtsb VRRC:$vA, VRRC:$vB))]>;
def VCMPGTSBo : VXRForm_1<774, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtsb. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v16i8
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 774)))]>, isVDOT;
def VCMPGTUB  : VXRForm_1<518, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtub $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtub VRRC:$vA, VRRC:$vB))]>;
def VCMPGTUBo : VXRForm_1<518, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtub. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v16i8
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 518)))]>, isVDOT;

// i16 element comparisons.
def VCMPEQUH  : VXRForm_1<70, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpequh $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpequh VRRC:$vA, VRRC:$vB))]>;
def VCMPEQUHo : VXRForm_1<70, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpequh. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v8i16
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 70)))]>, isVDOT;
def VCMPGTSH  : VXRForm_1<838, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtsh $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtsh VRRC:$vA, VRRC:$vB))]>;
def VCMPGTSHo : VXRForm_1<838, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtsh. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v8i16
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 838)))]>, isVDOT;
def VCMPGTUH  : VXRForm_1<582, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtuh $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtuh VRRC:$vA, VRRC:$vB))]>;
def VCMPGTUHo : VXRForm_1<582, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtuh. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v8i16
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 582)))]>, isVDOT;

// i32 element comparisons.
def VCMPEQUW  : VXRForm_1<134, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpequw $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpequw VRRC:$vA, VRRC:$vB))]>;
def VCMPEQUWo : VXRForm_1<134, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpequw. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4i32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 134)))]>, isVDOT;
def VCMPGTSW  : VXRForm_1<902, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtsw $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtsw VRRC:$vA, VRRC:$vB))]>;
def VCMPGTSWo : VXRForm_1<902, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtsw. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4i32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 902)))]>, isVDOT;
def VCMPGTUW  : VXRForm_1<646, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtuw $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD,
                            (int_ppc_altivec_vcmpgtuw VRRC:$vA, VRRC:$vB))]>;
def VCMPGTUWo : VXRForm_1<646, (ops VRRC:$vD, VRRC:$vA, VRRC:$vB),
                          "vcmpgtuw. $vD, $vA, $vB", VecFPCompare,
                          [(set VRRC:$vD, (v4i32
                            (PPCvcmp_o VRRC:$vA, VRRC:$vB, 646)))]>, isVDOT;

def V_SET0 : VXForm_setzero<1220, (ops VRRC:$vD),
                            "vxor $vD, $vD, $vD", VecFP,
                            [(set VRRC:$vD, (v4f32 immAllZerosV))]>;
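// V_SET0 works because xor-ing a register with itself always yields zero, so
// the all-zeros vector costs one instruction and no constant-pool load.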
}

//===----------------------------------------------------------------------===//
// Additional Altivec Patterns
//

// Undef/Zero.
def : Pat<(v16i8 (undef)), (v16i8 (IMPLICIT_DEF_VRRC))>;
def : Pat<(v8i16 (undef)), (v8i16 (IMPLICIT_DEF_VRRC))>;
def : Pat<(v4i32 (undef)), (v4i32 (IMPLICIT_DEF_VRRC))>;
def : Pat<(v16i8 immAllZerosV), (v16i8 (V_SET0))>;
def : Pat<(v8i16 immAllZerosV), (v8i16 (V_SET0))>;
def : Pat<(v4i32 immAllZerosV), (v4i32 (V_SET0))>;

// Loads.
def : Pat<(v16i8 (load xoaddr:$src)), (v16i8 (LVX xoaddr:$src))>;
def : Pat<(v8i16 (load xoaddr:$src)), (v8i16 (LVX xoaddr:$src))>;
def : Pat<(v4i32 (load xoaddr:$src)), (v4i32 (LVX xoaddr:$src))>;
def : Pat<(v4f32 (load xoaddr:$src)), (v4f32 (LVX xoaddr:$src))>;

// Stores.
def : Pat<(store (v16i8 VRRC:$rS), xoaddr:$dst),
          (STVX (v16i8 VRRC:$rS), xoaddr:$dst)>;
def : Pat<(store (v8i16 VRRC:$rS), xoaddr:$dst),
          (STVX (v8i16 VRRC:$rS), xoaddr:$dst)>;
def : Pat<(store (v4i32 VRRC:$rS), xoaddr:$dst),
          (STVX (v4i32 VRRC:$rS), xoaddr:$dst)>;
def : Pat<(store (v4f32 VRRC:$rS), xoaddr:$dst),
          (STVX (v4f32 VRRC:$rS), xoaddr:$dst)>;

// Bit conversions.
def : Pat<(v16i8 (bitconvert (v8i16 VRRC:$src))), (v16i8 VRRC:$src)>;
def : Pat<(v16i8 (bitconvert (v4i32 VRRC:$src))), (v16i8 VRRC:$src)>;
def : Pat<(v16i8 (bitconvert (v4f32 VRRC:$src))), (v16i8 VRRC:$src)>;

def : Pat<(v8i16 (bitconvert (v16i8 VRRC:$src))), (v8i16 VRRC:$src)>;
def : Pat<(v8i16 (bitconvert (v4i32 VRRC:$src))), (v8i16 VRRC:$src)>;
def : Pat<(v8i16 (bitconvert (v4f32 VRRC:$src))), (v8i16 VRRC:$src)>;

def : Pat<(v4i32 (bitconvert (v16i8 VRRC:$src))), (v4i32 VRRC:$src)>;
def : Pat<(v4i32 (bitconvert (v8i16 VRRC:$src))), (v4i32 VRRC:$src)>;
def : Pat<(v4i32 (bitconvert (v4f32 VRRC:$src))), (v4i32 VRRC:$src)>;

def : Pat<(v4f32 (bitconvert (v16i8 VRRC:$src))), (v4f32 VRRC:$src)>;
def : Pat<(v4f32 (bitconvert (v8i16 VRRC:$src))), (v4f32 VRRC:$src)>;
def : Pat<(v4f32 (bitconvert (v4i32 VRRC:$src))), (v4f32 VRRC:$src)>;
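
// All of the above bitconverts are free: every vector type lives in the same
// VRRC register file, so retyping a value never has to move any bits.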

// Immediate vector formation with vsplti*.
def : Pat<(v16i8 vecspltisb:$invec), (v16i8 (VSPLTISB vecspltisb:$invec))>;
def : Pat<(v16i8 vecspltish:$invec), (v16i8 (VSPLTISH vecspltish:$invec))>;
def : Pat<(v16i8 vecspltisw:$invec), (v16i8 (VSPLTISW vecspltisw:$invec))>;

def : Pat<(v8i16 vecspltisb:$invec), (v8i16 (VSPLTISB vecspltisb:$invec))>;
def : Pat<(v8i16 vecspltish:$invec), (v8i16 (VSPLTISH vecspltish:$invec))>;
def : Pat<(v8i16 vecspltisw:$invec), (v8i16 (VSPLTISW vecspltisw:$invec))>;

def : Pat<(v4i32 vecspltisb:$invec), (v4i32 (VSPLTISB vecspltisb:$invec))>;
def : Pat<(v4i32 vecspltish:$invec), (v4i32 (VSPLTISH vecspltish:$invec))>;
def : Pat<(v4i32 vecspltisw:$invec), (v4i32 (VSPLTISW vecspltisw:$invec))>;

// Logical Operations
def : Pat<(v16i8 (and VRRC:$A, VRRC:$B)), (v16i8 (VAND VRRC:$A, VRRC:$B))>;
def : Pat<(v8i16 (and VRRC:$A, VRRC:$B)), (v8i16 (VAND VRRC:$A, VRRC:$B))>;
def : Pat<(v16i8 (or  VRRC:$A, VRRC:$B)), (v16i8 (VOR  VRRC:$A, VRRC:$B))>;
def : Pat<(v8i16 (or  VRRC:$A, VRRC:$B)), (v8i16 (VOR  VRRC:$A, VRRC:$B))>;
def : Pat<(v16i8 (xor VRRC:$A, VRRC:$B)), (v16i8 (VXOR VRRC:$A, VRRC:$B))>;
def : Pat<(v8i16 (xor VRRC:$A, VRRC:$B)), (v8i16 (VXOR VRRC:$A, VRRC:$B))>;
def : Pat<(v16i8 (vnot (or VRRC:$A, VRRC:$B))), (v16i8 (VNOR VRRC:$A, VRRC:$B))>;
def : Pat<(v8i16 (vnot (or VRRC:$A, VRRC:$B))), (v8i16 (VNOR VRRC:$A, VRRC:$B))>;
def : Pat<(v16i8 (and VRRC:$A, (vnot VRRC:$B))),
          (v16i8 (VANDC VRRC:$A, VRRC:$B))>;
def : Pat<(v8i16 (and VRRC:$A, (vnot VRRC:$B))),
          (v8i16 (VANDC VRRC:$A, VRRC:$B))>;

def : Pat<(fmul VRRC:$vA, VRRC:$vB),
          (VMADDFP VRRC:$vA, VRRC:$vB, (V_SET0))>;
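
// AltiVec has no plain vector FP multiply, so fmul above is selected as a
// multiply-add with an all-zeros addend: $vD = $vA * $vB + 0.0.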

// Fused multiply-add and multiply-subtract for packed float.  These are
// represented separately from the real instructions above, for operations
// that must keep the additional precision, such as Newton-Raphson iteration
// (used by divide and sqrt).
def : Pat<(PPCvmaddfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VMADDFP VRRC:$A, VRRC:$B, VRRC:$C)>;
def : Pat<(PPCvnmsubfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VNMSUBFP VRRC:$A, VRRC:$B, VRRC:$C)>;
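
// For example, one Newton-Raphson refinement of the vrefp reciprocal
// estimate needs both fused nodes (with "one" a vector of 1.0):
//   y0 = vrefp(b)        ; estimate of 1/b, good to about 12 bits
//   t  = 1.0 - b * y0    ; one vnmsubfp, keeping the unrounded product
//   y1 = y0 + y0 * t     ; one vmaddfp; now good to about 24 bits
// The refinement only converges this fast because the intermediate products
// are not rounded, which is why these nodes must map onto the fused forms.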

def : Pat<(int_ppc_altivec_vmaddfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VMADDFP VRRC:$A, VRRC:$B, VRRC:$C)>;
def : Pat<(int_ppc_altivec_vnmsubfp VRRC:$A, VRRC:$B, VRRC:$C),
          (VNMSUBFP VRRC:$A, VRRC:$B, VRRC:$C)>;
def : Pat<(int_ppc_altivec_vperm VRRC:$A, VRRC:$B, VRRC:$C),
          (VPERM VRRC:$A, VRRC:$B, VRRC:$C)>;
def : Pat<(vector_shuffle (v4i32 VRRC:$vB), (undef), VSPLT_shuffle_mask:$UIMM),
          (v4i32 (VSPLTW VSPLT_shuffle_mask:$UIMM, VRRC:$vB))>;

def : Pat<(PPCvperm (v4i32 VRRC:$vA), VRRC:$vB, VRRC:$vC),
          (v4i32 (VPERM VRRC:$vA, VRRC:$vB, VRRC:$vC))>;