
/*---------------------------------------------------------------*/
/*---                                                          ---*/
/*--- This file (host-ppc32/hdefs.c) is                        ---*/
/*--- Copyright (c) 2005 OpenWorks LLP.  All rights reserved.  ---*/
/*---                                                          ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of LibVEX, a library for dynamic binary
   instrumentation and translation.

   Copyright (C) 2004-2005 OpenWorks, LLP.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; Version 2 dated June 1991 of the
   license.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or liability
   for damages.  See the GNU General Public License for more details.

   Neither the names of the U.S. Department of Energy nor the
   University of California nor the names of its contributors may be
   used to endorse or promote products derived from this software
   without prior written permission.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
   USA.
*/

#include "libvex_basictypes.h"
#include "libvex.h"
#include "libvex_trc_values.h"

#include "main/vex_util.h"
#include "host-generic/h_generic_regs.h"
#include "host-ppc32/hdefs.h"


/* --------- Registers. --------- */

void ppHRegPPC32 ( HReg reg )
{
   Int r;
   static HChar* ireg32_names[32]
      = { "%r0",  "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
          "%r8",  "%r9",  "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
          "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
          "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31" };
   /* Be generic for all virtual regs. */
   if (hregIsVirtual(reg)) {
      ppHReg(reg);
      return;
   }
   /* But specific for real regs. */
   switch (hregClass(reg)) {
      case HRcInt32:
         r = hregNumber(reg);
         vassert(r >= 0 && r < 32);
         vex_printf("%s", ireg32_names[r]);
         return;
      case HRcFlt64:
         r = hregNumber(reg);
         vassert(r >= 0 && r < 32);   /* FPR0 .. FPR31 */
         vex_printf("%%fpr%d", r);
         return;
      default:
         vpanic("ppHRegPPC32");
   }
}
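
/* Illustrative sketch only (not part of the build): how a caller
   might print one real and one virtual register using the routine
   above.  The particular registers chosen are arbitrary examples. */
#if 0
static void example_ppHReg_usage ( void )
{
   HReg r3   = hregPPC32_GPR3();             /* real GPR; prints "%r3" */
   HReg virt = mkHReg(0, HRcInt32, True);    /* virtual; printed generically */
   ppHRegPPC32(r3);   vex_printf("\n");
   ppHRegPPC32(virt); vex_printf("\n");
}
#endif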
76
cerioncd304492005-02-08 19:40:24 +000077HReg hregPPC32_GPR0 ( void ) { return mkHReg( 0, HRcInt32, False); }
78HReg hregPPC32_GPR1 ( void ) { return mkHReg( 1, HRcInt32, False); }
79HReg hregPPC32_GPR2 ( void ) { return mkHReg( 2, HRcInt32, False); }
80HReg hregPPC32_GPR3 ( void ) { return mkHReg( 3, HRcInt32, False); }
81HReg hregPPC32_GPR4 ( void ) { return mkHReg( 4, HRcInt32, False); }
82HReg hregPPC32_GPR5 ( void ) { return mkHReg( 5, HRcInt32, False); }
83HReg hregPPC32_GPR6 ( void ) { return mkHReg( 6, HRcInt32, False); }
84HReg hregPPC32_GPR7 ( void ) { return mkHReg( 7, HRcInt32, False); }
85HReg hregPPC32_GPR8 ( void ) { return mkHReg( 8, HRcInt32, False); }
86HReg hregPPC32_GPR9 ( void ) { return mkHReg( 9, HRcInt32, False); }
87HReg hregPPC32_GPR10 ( void ) { return mkHReg(10, HRcInt32, False); }
88HReg hregPPC32_GPR11 ( void ) { return mkHReg(11, HRcInt32, False); }
89HReg hregPPC32_GPR12 ( void ) { return mkHReg(12, HRcInt32, False); }
90HReg hregPPC32_GPR13 ( void ) { return mkHReg(13, HRcInt32, False); }
91HReg hregPPC32_GPR14 ( void ) { return mkHReg(14, HRcInt32, False); }
92HReg hregPPC32_GPR15 ( void ) { return mkHReg(15, HRcInt32, False); }
93HReg hregPPC32_GPR16 ( void ) { return mkHReg(16, HRcInt32, False); }
94HReg hregPPC32_GPR17 ( void ) { return mkHReg(17, HRcInt32, False); }
95HReg hregPPC32_GPR18 ( void ) { return mkHReg(18, HRcInt32, False); }
96HReg hregPPC32_GPR19 ( void ) { return mkHReg(19, HRcInt32, False); }
97HReg hregPPC32_GPR20 ( void ) { return mkHReg(20, HRcInt32, False); }
98HReg hregPPC32_GPR21 ( void ) { return mkHReg(21, HRcInt32, False); }
99HReg hregPPC32_GPR22 ( void ) { return mkHReg(22, HRcInt32, False); }
100HReg hregPPC32_GPR23 ( void ) { return mkHReg(23, HRcInt32, False); }
101HReg hregPPC32_GPR24 ( void ) { return mkHReg(24, HRcInt32, False); }
102HReg hregPPC32_GPR25 ( void ) { return mkHReg(25, HRcInt32, False); }
103HReg hregPPC32_GPR26 ( void ) { return mkHReg(26, HRcInt32, False); }
104HReg hregPPC32_GPR27 ( void ) { return mkHReg(27, HRcInt32, False); }
105HReg hregPPC32_GPR28 ( void ) { return mkHReg(28, HRcInt32, False); }
106HReg hregPPC32_GPR29 ( void ) { return mkHReg(29, HRcInt32, False); }
107HReg hregPPC32_GPR30 ( void ) { return mkHReg(30, HRcInt32, False); }
108HReg hregPPC32_GPR31 ( void ) { return mkHReg(31, HRcInt32, False); }
109
110HReg hregPPC32_FPR0 ( void ) { return mkHReg( 0, HRcFlt64, False); }
111HReg hregPPC32_FPR1 ( void ) { return mkHReg( 1, HRcFlt64, False); }
112HReg hregPPC32_FPR2 ( void ) { return mkHReg( 2, HRcFlt64, False); }
113HReg hregPPC32_FPR3 ( void ) { return mkHReg( 3, HRcFlt64, False); }
114HReg hregPPC32_FPR4 ( void ) { return mkHReg( 4, HRcFlt64, False); }
115HReg hregPPC32_FPR5 ( void ) { return mkHReg( 5, HRcFlt64, False); }
116HReg hregPPC32_FPR6 ( void ) { return mkHReg( 6, HRcFlt64, False); }
117HReg hregPPC32_FPR7 ( void ) { return mkHReg( 7, HRcFlt64, False); }
118HReg hregPPC32_FPR8 ( void ) { return mkHReg( 8, HRcFlt64, False); }
119HReg hregPPC32_FPR9 ( void ) { return mkHReg( 9, HRcFlt64, False); }
120HReg hregPPC32_FPR10 ( void ) { return mkHReg(10, HRcFlt64, False); }
121HReg hregPPC32_FPR11 ( void ) { return mkHReg(11, HRcFlt64, False); }
122HReg hregPPC32_FPR12 ( void ) { return mkHReg(12, HRcFlt64, False); }
123HReg hregPPC32_FPR13 ( void ) { return mkHReg(13, HRcFlt64, False); }
124HReg hregPPC32_FPR14 ( void ) { return mkHReg(14, HRcFlt64, False); }
125HReg hregPPC32_FPR15 ( void ) { return mkHReg(15, HRcFlt64, False); }
126HReg hregPPC32_FPR16 ( void ) { return mkHReg(16, HRcFlt64, False); }
127HReg hregPPC32_FPR17 ( void ) { return mkHReg(17, HRcFlt64, False); }
128HReg hregPPC32_FPR18 ( void ) { return mkHReg(18, HRcFlt64, False); }
129HReg hregPPC32_FPR19 ( void ) { return mkHReg(19, HRcFlt64, False); }
130HReg hregPPC32_FPR20 ( void ) { return mkHReg(20, HRcFlt64, False); }
131HReg hregPPC32_FPR21 ( void ) { return mkHReg(21, HRcFlt64, False); }
132HReg hregPPC32_FPR22 ( void ) { return mkHReg(22, HRcFlt64, False); }
133HReg hregPPC32_FPR23 ( void ) { return mkHReg(23, HRcFlt64, False); }
134HReg hregPPC32_FPR24 ( void ) { return mkHReg(24, HRcFlt64, False); }
135HReg hregPPC32_FPR25 ( void ) { return mkHReg(25, HRcFlt64, False); }
136HReg hregPPC32_FPR26 ( void ) { return mkHReg(26, HRcFlt64, False); }
137HReg hregPPC32_FPR27 ( void ) { return mkHReg(27, HRcFlt64, False); }
138HReg hregPPC32_FPR28 ( void ) { return mkHReg(28, HRcFlt64, False); }
139HReg hregPPC32_FPR29 ( void ) { return mkHReg(29, HRcFlt64, False); }
140HReg hregPPC32_FPR30 ( void ) { return mkHReg(30, HRcFlt64, False); }
141HReg hregPPC32_FPR31 ( void ) { return mkHReg(31, HRcFlt64, False); }
cerionbcf8c3e2005-02-04 16:17:07 +0000142
cerionbcf8c3e2005-02-04 16:17:07 +0000143void getAllocableRegs_PPC32 ( Int* nregs, HReg** arr )
cerioncd304492005-02-08 19:40:24 +0000144{
cerionb4a632a2005-02-09 21:01:22 +0000145 *nregs = 59;
cerioncd304492005-02-08 19:40:24 +0000146 *arr = LibVEX_Alloc(*nregs * sizeof(HReg));
cerionb4a632a2005-02-09 21:01:22 +0000147 // GPR0 = reserved
cerion2c49e032005-02-09 17:29:49 +0000148 // GPR1 = stack pointer
149 // GPR2 = TOC pointer
cerionb4a632a2005-02-09 21:01:22 +0000150 (*arr)[ 0] = hregPPC32_GPR3();
151 (*arr)[ 1] = hregPPC32_GPR4();
152 (*arr)[ 2] = hregPPC32_GPR5();
153 (*arr)[ 3] = hregPPC32_GPR6();
154 (*arr)[ 4] = hregPPC32_GPR7();
155 (*arr)[ 5] = hregPPC32_GPR8();
156 (*arr)[ 6] = hregPPC32_GPR9();
157 (*arr)[ 7] = hregPPC32_GPR10();
158 (*arr)[ 8] = hregPPC32_GPR11();
159 (*arr)[ 9] = hregPPC32_GPR12();
cerion2c49e032005-02-09 17:29:49 +0000160 // GPR13 = thread specific pointer
cerionb4a632a2005-02-09 21:01:22 +0000161 (*arr)[10] = hregPPC32_GPR14();
162 (*arr)[11] = hregPPC32_GPR15();
163 (*arr)[12] = hregPPC32_GPR16();
164 (*arr)[13] = hregPPC32_GPR17();
165 (*arr)[14] = hregPPC32_GPR18();
166 (*arr)[15] = hregPPC32_GPR19();
167 (*arr)[16] = hregPPC32_GPR20();
168 (*arr)[17] = hregPPC32_GPR21();
169 (*arr)[18] = hregPPC32_GPR22();
170 (*arr)[19] = hregPPC32_GPR23();
171 (*arr)[20] = hregPPC32_GPR24();
172 (*arr)[21] = hregPPC32_GPR25();
173 (*arr)[22] = hregPPC32_GPR26();
174 (*arr)[23] = hregPPC32_GPR27();
175 (*arr)[24] = hregPPC32_GPR28();
176 (*arr)[25] = hregPPC32_GPR29();
177 (*arr)[26] = hregPPC32_GPR30();
cerion2c49e032005-02-09 17:29:49 +0000178 // GPR31 = GuestStatePtr
cerioncd304492005-02-08 19:40:24 +0000179
cerionb4a632a2005-02-09 21:01:22 +0000180 (*arr)[27] = hregPPC32_FPR0();
181 (*arr)[28] = hregPPC32_FPR1();
182 (*arr)[29] = hregPPC32_FPR2();
183 (*arr)[30] = hregPPC32_FPR3();
184 (*arr)[31] = hregPPC32_FPR4();
185 (*arr)[32] = hregPPC32_FPR5();
186 (*arr)[33] = hregPPC32_FPR6();
187 (*arr)[34] = hregPPC32_FPR7();
188 (*arr)[35] = hregPPC32_FPR8();
189 (*arr)[36] = hregPPC32_FPR9();
190 (*arr)[37] = hregPPC32_FPR10();
191 (*arr)[38] = hregPPC32_FPR11();
192 (*arr)[39] = hregPPC32_FPR12();
193 (*arr)[40] = hregPPC32_FPR13();
194 (*arr)[41] = hregPPC32_FPR14();
195 (*arr)[42] = hregPPC32_FPR15();
196 (*arr)[43] = hregPPC32_FPR16();
197 (*arr)[44] = hregPPC32_FPR17();
198 (*arr)[45] = hregPPC32_FPR18();
199 (*arr)[46] = hregPPC32_FPR19();
200 (*arr)[47] = hregPPC32_FPR20();
201 (*arr)[48] = hregPPC32_FPR21();
202 (*arr)[49] = hregPPC32_FPR22();
203 (*arr)[50] = hregPPC32_FPR23();
204 (*arr)[51] = hregPPC32_FPR24();
205 (*arr)[52] = hregPPC32_FPR25();
206 (*arr)[53] = hregPPC32_FPR26();
207 (*arr)[54] = hregPPC32_FPR27();
208 (*arr)[55] = hregPPC32_FPR28();
209 (*arr)[56] = hregPPC32_FPR29();
210 (*arr)[57] = hregPPC32_FPR30();
211 (*arr)[58] = hregPPC32_FPR31();
cerion2c49e032005-02-09 17:29:49 +0000212}
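
/* Illustrative sketch only (not part of the build): the register
   allocator is expected to call getAllocableRegs_PPC32 once, along
   these lines, to discover the real registers it may use. */
#if 0
static void example_getAllocableRegs ( void )
{
   Int   nregs;
   HReg* regs;
   getAllocableRegs_PPC32( &nregs, &regs );
   vex_printf("PPC32: %d allocable registers\n", nregs);
}
#endif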


/* --------- Condition codes (still the x86/Intel encoding). --------- */

HChar* showPPC32CondCode ( PPC32CondCode cond )
{
   switch (cond) {
      case Pcc_O:      return "o";
      case Pcc_NO:     return "no";
      case Pcc_B:      return "b";
      case Pcc_NB:     return "nb";
      case Pcc_Z:      return "z";
      case Pcc_NZ:     return "nz";
      case Pcc_BE:     return "be";
      case Pcc_NBE:    return "nbe";
      case Pcc_S:      return "s";
      case Pcc_NS:     return "ns";
      case Pcc_P:      return "p";
      case Pcc_NP:     return "np";
      case Pcc_L:      return "l";
      case Pcc_NL:     return "nl";
      case Pcc_LE:     return "le";
      case Pcc_NLE:    return "nle";
      case Pcc_ALWAYS: return "ALWAYS";
      default: vpanic("showPPC32CondCode");
   }
}


/* --------- PPCAMode: memory address expressions. --------- */

PPC32AMode* PPC32AMode_IR ( UInt idx, HReg base ) {
   // CAB: Rem assert immediate
   PPC32AMode* am = LibVEX_Alloc(sizeof(PPC32AMode));
   am->tag = Pam_IR;
   am->Pam.IR.base = base;
   am->Pam.IR.index = idx;
   return am;
}
PPC32AMode* PPC32AMode_RR ( HReg idx, HReg base ) {
   PPC32AMode* am = LibVEX_Alloc(sizeof(PPC32AMode));
   am->tag = Pam_RR;
   am->Pam.RR.base = base;
   am->Pam.RR.index = idx;
   return am;
}

PPC32AMode* dopyPPC32AMode ( PPC32AMode* am ) {
   switch (am->tag) {
      case Pam_IR:
         return PPC32AMode_IR( am->Pam.IR.index, am->Pam.IR.base );
      case Pam_RR:
         return PPC32AMode_RR( am->Pam.RR.index, am->Pam.RR.base );
      default:
         vpanic("dopyPPC32AMode");
   }
}

void ppPPC32AMode ( PPC32AMode* am ) {
   switch (am->tag) {
      case Pam_IR:
         if (am->Pam.IR.index == 0)
            vex_printf("(");
         else
            vex_printf("0x%x(", am->Pam.IR.index);
         ppHRegPPC32(am->Pam.IR.base);
         vex_printf(")");
         return;
      case Pam_RR:
         ppHRegPPC32(am->Pam.RR.base);
         vex_printf(",");
         ppHRegPPC32(am->Pam.RR.index);
         return;
      default:
         vpanic("ppPPC32AMode");
   }
}
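
/* Illustrative sketch only (not part of the build): building and
   printing the two addressing-mode forms.  The offsets and registers
   are arbitrary examples. */
#if 0
static void example_amodes ( void )
{
   PPC32AMode* am1 = PPC32AMode_IR( 8, hregPPC32_GPR1() );                /* 8(%r1)   */
   PPC32AMode* am2 = PPC32AMode_RR( hregPPC32_GPR4(), hregPPC32_GPR3() ); /* %r3,%r4  */
   ppPPC32AMode(am1);  vex_printf("\n");
   ppPPC32AMode(am2);  vex_printf("\n");
}
#endif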

static void addRegUsage_PPC32AMode ( HRegUsage* u, PPC32AMode* am ) {
   switch (am->tag) {
      case Pam_IR:
         addHRegUse(u, HRmRead, am->Pam.IR.base);
         return;
      case Pam_RR:
         addHRegUse(u, HRmRead, am->Pam.RR.base);
         addHRegUse(u, HRmRead, am->Pam.RR.index);
         return;
      default:
         vpanic("addRegUsage_PPC32AMode");
   }
}

static void mapRegs_PPC32AMode ( HRegRemap* m, PPC32AMode* am ) {
   switch (am->tag) {
      case Pam_IR:
         am->Pam.IR.base = lookupHRegRemap(m, am->Pam.IR.base);
         return;
      case Pam_RR:
         am->Pam.RR.base = lookupHRegRemap(m, am->Pam.RR.base);
         am->Pam.RR.index = lookupHRegRemap(m, am->Pam.RR.index);
         return;
      default:
         vpanic("mapRegs_PPC32AMode");
   }
}

/* --------- Operand, which can be reg or immediate only. --------- */

PPC32RI* PPC32RI_Imm ( UInt imm32 ) {
   // CAB: Rem assert immediate
   PPC32RI* op = LibVEX_Alloc(sizeof(PPC32RI));
   op->tag = Pri_Imm;
   op->Pri.Imm.imm32 = imm32;
   return op;
}
PPC32RI* PPC32RI_Reg ( HReg reg ) {
   PPC32RI* op = LibVEX_Alloc(sizeof(PPC32RI));
   op->tag = Pri_Reg;
   op->Pri.Reg.reg = reg;
   return op;
}

void ppPPC32RI ( PPC32RI* op ) {
   switch (op->tag) {
      case Pri_Imm:
         vex_printf("$0x%x", op->Pri.Imm.imm32);
         return;
      case Pri_Reg:
         ppHRegPPC32(op->Pri.Reg.reg);
         return;
      default:
         vpanic("ppPPC32RI");
   }
}
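
/* Illustrative sketch only (not part of the build): an operand that
   is either an immediate or a register. */
#if 0
static void example_RI ( void )
{
   PPC32RI* imm = PPC32RI_Imm( 0x1234 );            /* prints "$0x1234" */
   PPC32RI* reg = PPC32RI_Reg( hregPPC32_GPR5() );  /* prints "%r5"     */
   ppPPC32RI(imm);  vex_printf("\n");
   ppPPC32RI(reg);  vex_printf("\n");
}
#endif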

/* A PPC32RI can only be used in a "read" context (what would it mean
   to write or modify a literal?) and so we enumerate its registers
   accordingly. */
static void addRegUsage_PPC32RI ( HRegUsage* u, PPC32RI* op ) {
   switch (op->tag) {
      case Pri_Imm:
         return;
      case Pri_Reg:
         addHRegUse(u, HRmRead, op->Pri.Reg.reg);
         return;
      default:
         vpanic("addRegUsage_PPC32RI");
   }
}

static void mapRegs_PPC32RI ( HRegRemap* m, PPC32RI* op ) {
   switch (op->tag) {
      case Pri_Imm:
         return;
      case Pri_Reg:
         op->Pri.Reg.reg = lookupHRegRemap(m, op->Pri.Reg.reg);
         return;
      default:
         vpanic("mapRegs_PPC32RI");
   }
}

/* --------- Instructions. --------- */

//.. HChar* showX86ScalarSz ( X86ScalarSz sz ) {
//.. switch (sz) {
//.. case Xss_16: return "w";
//.. case Xss_32: return "l";
//.. default: vpanic("showX86ScalarSz");
//.. }
//.. }

HChar* showPPC32UnaryOp ( PPC32UnaryOp op ) {
   switch (op) {
      case Pun_NOT: return "not";
      case Pun_NEG: return "neg";
      default: vpanic("showPPC32UnaryOp");
   }
}

HChar* showPPC32AluOp ( PPC32AluOp op ) {
   switch (op) {
      case Palu_CMP: return "cmp";
      case Palu_ADD: return "add";
      case Palu_SUB: return "sub";
      case Palu_ADC: return "adc";
      case Palu_SBB: return "sbb";
      case Palu_AND: return "and";
      case Palu_OR:  return "or";
      case Palu_XOR: return "xor";
      case Palu_MUL: return "mul";
      default: vpanic("showPPC32AluOp");
   }
}

HChar* showPPC32ShiftOp ( PPC32ShiftOp op ) {
   switch (op) {
      case Psh_SHL: return "shl";
      case Psh_SHR: return "shr";
      case Psh_SAR: return "sar";
      case Psh_ROL: return "rol";
      default: vpanic("showPPC32ShiftOp");
   }
}

//.. HChar* showX86FpOp ( X86FpOp op ) {
//.. switch (op) {
//.. case Xfp_ADD: return "add";
//.. case Xfp_SUB: return "sub";
//.. case Xfp_MUL: return "mul";
//.. case Xfp_DIV: return "div";
//.. case Xfp_SCALE: return "scale";
//.. case Xfp_ATAN: return "atan";
//.. case Xfp_YL2X: return "yl2x";
//.. case Xfp_YL2XP1: return "yl2xp1";
//.. case Xfp_PREM: return "prem";
//.. case Xfp_PREM1: return "prem1";
//.. case Xfp_SQRT: return "sqrt";
//.. case Xfp_ABS: return "abs";
//.. case Xfp_NEG: return "chs";
//.. case Xfp_MOV: return "mov";
//.. case Xfp_SIN: return "sin";
//.. case Xfp_COS: return "cos";
//.. case Xfp_TAN: return "tan";
//.. case Xfp_ROUND: return "round";
//.. case Xfp_2XM1: return "2xm1";
//.. default: vpanic("showX86FpOp");
//.. }
//.. }

PPC32Instr* PPC32Instr_Alu32 ( PPC32AluOp op, HReg dst, HReg src1, PPC32RI* src2 ) {
   PPC32Instr* i     = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag            = Pin_Alu32;
   i->Pin.Alu32.op   = op;
   i->Pin.Alu32.dst  = dst;
   i->Pin.Alu32.src1 = src1;
   i->Pin.Alu32.src2 = src2;
   return i;
}
PPC32Instr* PPC32Instr_Sh32 ( PPC32ShiftOp op, HReg dst, HReg src, PPC32RI* shft ) {
   PPC32Instr* i    = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag           = Pin_Sh32;
   i->Pin.Sh32.op   = op;
   i->Pin.Sh32.dst  = dst;
   i->Pin.Sh32.src  = src;
   i->Pin.Sh32.shft = shft;
   return i;
}
PPC32Instr* PPC32Instr_Test32 ( HReg dst, PPC32RI* src ) {
   PPC32Instr* i      = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag             = Pin_Test32;
   i->Pin.Test32.dst  = dst;
   i->Pin.Test32.src  = src;
   return i;
}
PPC32Instr* PPC32Instr_Unary32 ( PPC32UnaryOp op, PPC32RI* dst ) {
   PPC32Instr* i      = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag             = Pin_Unary32;
   i->Pin.Unary32.op  = op;
   i->Pin.Unary32.dst = dst;
   return i;
}
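
/* Illustrative sketch only (not part of the build): constructing a
   couple of Pin_Alu32 instructions -- an add of an immediate, and a
   reg-to-reg move expressed as "or rD,rS,rS" (printed as "mr"). */
#if 0
static void example_Alu32 ( void )
{
   PPC32Instr* add = PPC32Instr_Alu32( Palu_ADD, hregPPC32_GPR3(),
                                       hregPPC32_GPR4(), PPC32RI_Imm(1) );
   PPC32Instr* mov = PPC32Instr_Alu32( Palu_OR, hregPPC32_GPR5(),
                                       hregPPC32_GPR6(),
                                       PPC32RI_Reg(hregPPC32_GPR6()) );
   ppPPC32Instr(add);  vex_printf("\n");
   ppPPC32Instr(mov);  vex_printf("\n");   /* printed as "mr %r5,%r6" */
}
#endif
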
//.. X86Instr* X86Instr_MulL ( Bool syned, X86ScalarSz ssz , X86RM* src ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_MulL;
//.. i->Xin.MulL.syned = syned;
//.. i->Xin.MulL.ssz = ssz;
//.. i->Xin.MulL.src = src;
//.. return i;
//.. }
//.. X86Instr* X86Instr_Div ( Bool syned, X86ScalarSz ssz, X86RM* src ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_Div;
//.. i->Xin.Div.syned = syned;
//.. i->Xin.Div.ssz = ssz;
//.. i->Xin.Div.src = src;
//.. return i;
//.. }
//.. X86Instr* X86Instr_Sh3232 ( X86ShiftOp op, UInt amt, HReg src, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_Sh3232;
//.. i->Xin.Sh3232.op = op;
//.. i->Xin.Sh3232.amt = amt;
//.. i->Xin.Sh3232.src = src;
//.. i->Xin.Sh3232.dst = dst;
//.. vassert(op == Xsh_SHL || op == Xsh_SHR);
//.. return i;
//.. }
//.. X86Instr* X86Instr_Push( X86RMI* src ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_Push;
//.. i->Xin.Push.src = src;
//.. return i;
//.. }
PPC32Instr* PPC32Instr_Call ( PPC32CondCode cond, Addr32 target, Int regparms ) {
   PPC32Instr* i        = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag               = Pin_Call;
   i->Pin.Call.cond     = cond;
   i->Pin.Call.target   = target;
   i->Pin.Call.regparms = regparms;
   vassert(regparms >= 0 && regparms < PPC32_N_REGPARMS);
   return i;
}
PPC32Instr* PPC32Instr_Goto ( IRJumpKind jk, PPC32CondCode cond, PPC32RI* dst ) {
   PPC32Instr* i    = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag           = Pin_Goto;
   i->Pin.Goto.cond = cond;
   i->Pin.Goto.dst  = dst;
   i->Pin.Goto.jk   = jk;
   return i;
}
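
/* Illustrative sketch only (not part of the build): an unconditional
   call and an unconditional goto.  The target address, regparms count
   and jump kind are arbitrary examples. */
#if 0
static void example_Call_Goto ( void )
{
   PPC32Instr* call = PPC32Instr_Call( Pcc_ALWAYS, (Addr32)0x12345678, 2 );
   PPC32Instr* jmp  = PPC32Instr_Goto( Ijk_Boring, Pcc_ALWAYS,
                                       PPC32RI_Reg(hregPPC32_GPR3()) );
   ppPPC32Instr(call);  vex_printf("\n");
   ppPPC32Instr(jmp);   vex_printf("\n");
}
#endif
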
//.. X86Instr* X86Instr_CMov32 ( X86CondCode cond, X86RM* src, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_CMov32;
//.. i->Xin.CMov32.cond = cond;
//.. i->Xin.CMov32.src = src;
//.. i->Xin.CMov32.dst = dst;
//.. vassert(cond != Xcc_ALWAYS);
//.. return i;
//.. }
PPC32Instr* PPC32Instr_LoadEX ( UChar sz, Bool syned,
                                HReg dst, PPC32AMode* src ) {
   PPC32Instr* i       = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag              = Pin_LoadEX;
   i->Pin.LoadEX.sz    = sz;
   i->Pin.LoadEX.syned = syned;
   i->Pin.LoadEX.src   = src;
   i->Pin.LoadEX.dst   = dst;
   vassert(sz == 1 || sz == 2 || sz == 4);
   return i;
}
PPC32Instr* PPC32Instr_Store ( UChar sz, PPC32AMode* dst, HReg src ) {
   PPC32Instr* i    = LibVEX_Alloc(sizeof(PPC32Instr));
   i->tag           = Pin_Store;
   i->Pin.Store.sz  = sz;
   i->Pin.Store.src = src;
   i->Pin.Store.dst = dst;
   vassert(sz == 1 || sz == 2 || sz == 4);
   return i;
}
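
/* Illustrative sketch only (not part of the build): a zero-extending
   16-bit load and a 32-bit store through a simple reg+offset amode. */
#if 0
static void example_LoadStore ( void )
{
   PPC32AMode* am   = PPC32AMode_IR( 16, hregPPC32_GPR1() );
   PPC32Instr* load = PPC32Instr_LoadEX( 2, False/*zero-extend*/,
                                         hregPPC32_GPR7(), am );
   PPC32Instr* st   = PPC32Instr_Store( 4, am, hregPPC32_GPR7() );
   ppPPC32Instr(load);  vex_printf("\n");
   ppPPC32Instr(st);    vex_printf("\n");
}
#endif
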
//.. X86Instr* X86Instr_Set32 ( X86CondCode cond, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_Set32;
//.. i->Xin.Set32.cond = cond;
//.. i->Xin.Set32.dst = dst;
//.. return i;
//.. }
//.. X86Instr* X86Instr_Bsfr32 ( Bool isFwds, HReg src, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_Bsfr32;
//.. i->Xin.Bsfr32.isFwds = isFwds;
//.. i->Xin.Bsfr32.src = src;
//.. i->Xin.Bsfr32.dst = dst;
//.. return i;
//.. }
//.. X86Instr* X86Instr_MFence ( VexSubArch subarch )
//.. {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_MFence;
//.. i->Xin.MFence.subarch = subarch;
//.. vassert(subarch == VexSubArchX86_sse0
//.. || subarch == VexSubArchX86_sse1
//.. || subarch == VexSubArchX86_sse2);
//.. return i;
//.. }

//.. X86Instr* X86Instr_FpUnary ( X86FpOp op, HReg src, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpUnary;
//.. i->Xin.FpUnary.op = op;
//.. i->Xin.FpUnary.src = src;
//.. i->Xin.FpUnary.dst = dst;
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpBinary ( X86FpOp op, HReg srcL, HReg srcR, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpBinary;
//.. i->Xin.FpBinary.op = op;
//.. i->Xin.FpBinary.srcL = srcL;
//.. i->Xin.FpBinary.srcR = srcR;
//.. i->Xin.FpBinary.dst = dst;
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpLdSt ( Bool isLoad, UChar sz, HReg reg, X86AMode* addr ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpLdSt;
//.. i->Xin.FpLdSt.isLoad = isLoad;
//.. i->Xin.FpLdSt.sz = sz;
//.. i->Xin.FpLdSt.reg = reg;
//.. i->Xin.FpLdSt.addr = addr;
//.. vassert(sz == 4 || sz == 8);
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpLdStI ( Bool isLoad, UChar sz,
//.. HReg reg, X86AMode* addr ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpLdStI;
//.. i->Xin.FpLdStI.isLoad = isLoad;
//.. i->Xin.FpLdStI.sz = sz;
//.. i->Xin.FpLdStI.reg = reg;
//.. i->Xin.FpLdStI.addr = addr;
//.. vassert(sz == 2 || sz == 4 || sz == 8);
//.. return i;
//.. }
//.. X86Instr* X86Instr_Fp64to32 ( HReg src, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_Fp64to32;
//.. i->Xin.Fp64to32.src = src;
//.. i->Xin.Fp64to32.dst = dst;
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpCMov ( X86CondCode cond, HReg src, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpCMov;
//.. i->Xin.FpCMov.cond = cond;
//.. i->Xin.FpCMov.src = src;
//.. i->Xin.FpCMov.dst = dst;
//.. vassert(cond != Xcc_ALWAYS);
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpLdStCW ( Bool isLoad, X86AMode* addr ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpLdStCW;
//.. i->Xin.FpLdStCW.isLoad = isLoad;
//.. i->Xin.FpLdStCW.addr = addr;
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpStSW_AX ( void ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpStSW_AX;
//.. return i;
//.. }
//.. X86Instr* X86Instr_FpCmp ( HReg srcL, HReg srcR, HReg dst ) {
//.. X86Instr* i = LibVEX_Alloc(sizeof(X86Instr));
//.. i->tag = Xin_FpCmp;
//.. i->Xin.FpCmp.srcL = srcL;
//.. i->Xin.FpCmp.srcR = srcR;
//.. i->Xin.FpCmp.dst = dst;
//.. return i;
//.. }

void ppPPC32Instr ( PPC32Instr* i )
{
   switch (i->tag) {
      case Pin_Alu32:
         if (i->Pin.Alu32.op == Palu_OR &&   // or Rd,Rs,Rs == mr Rd,Rs
             i->Pin.Alu32.src2->tag == Pri_Reg &&
             i->Pin.Alu32.src2->Pri.Reg.reg == i->Pin.Alu32.src1) {
            vex_printf("mr ");
            ppHRegPPC32(i->Pin.Alu32.dst);
            vex_printf(",");
            ppHRegPPC32(i->Pin.Alu32.src1);
            return;
         }
         if (i->Pin.Alu32.op == Palu_ADD &&  // addi Rd,R0,imm == li Rd,imm
             i->Pin.Alu32.src1 == hregPPC32_GPR0()) {
            vex_printf("li ");
            ppHRegPPC32(i->Pin.Alu32.dst);
            vex_printf(",");
            ppPPC32RI(i->Pin.Alu32.src2);
            return;
         }
         vex_printf("%s ", showPPC32AluOp(i->Pin.Alu32.op));
         ppHRegPPC32(i->Pin.Alu32.dst);
         vex_printf(",");
         ppHRegPPC32(i->Pin.Alu32.src1);
         vex_printf(",");
         ppPPC32RI(i->Pin.Alu32.src2);
         return;
      case Pin_Sh32:
         vex_printf("%sl ", showPPC32ShiftOp(i->Pin.Sh32.op));
         ppHRegPPC32(i->Pin.Sh32.dst);
         vex_printf(",");
         ppHRegPPC32(i->Pin.Sh32.src);
         vex_printf(",");
         ppPPC32RI(i->Pin.Sh32.shft);
         return;
      case Pin_Test32:
         vex_printf("testl ");
         ppHRegPPC32(i->Pin.Test32.dst);
         vex_printf(",");
         ppPPC32RI(i->Pin.Test32.src);
         return;
//.. case Xin_Unary32:
//.. vex_printf("%sl ", showX86UnaryOp(i->Xin.Unary32.op));
//.. ppX86RM(i->Xin.Unary32.dst);
//.. return;
//.. case Xin_MulL:
//.. vex_printf("%cmul%s ",
//.. i->Xin.MulL.syned ? 's' : 'u',
//.. showX86ScalarSz(i->Xin.MulL.ssz));
//.. ppX86RM(i->Xin.MulL.src);
//.. return;
//.. case Xin_Div:
//.. vex_printf("%cdiv%s ",
//.. i->Xin.Div.syned ? 's' : 'u',
//.. showX86ScalarSz(i->Xin.Div.ssz));
//.. ppX86RM(i->Xin.Div.src);
//.. return;
//.. case Xin_Sh3232:
//.. vex_printf("%sdl ", showX86ShiftOp(i->Xin.Sh3232.op));
//.. if (i->Xin.Sh3232.amt == 0)
//.. vex_printf(" %%cl,");
//.. else
//.. vex_printf(" $%d,", i->Xin.Sh3232.amt);
//.. ppHRegX86(i->Xin.Sh3232.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Sh3232.dst);
//.. return;
//.. case Xin_Push:
//.. vex_printf("pushl ");
//.. ppX86RMI(i->Xin.Push.src);
//.. return;
      case Pin_Call:
// CAB: not 'bc' ?
         vex_printf("call%s[%d] ",
                    i->Pin.Call.cond==Pcc_ALWAYS
                       ? "" : showPPC32CondCode(i->Pin.Call.cond),
                    i->Pin.Call.regparms);
         vex_printf("0x%x", i->Pin.Call.target);
         break;
      case Pin_Goto:
         if (i->Pin.Goto.cond != Pcc_ALWAYS) {
            vex_printf("if (%%eflags.%s) { ",
                       showPPC32CondCode(i->Pin.Goto.cond));
         }
         if (i->Pin.Goto.jk != Ijk_Boring) {
            vex_printf("movl $");
            ppIRJumpKind(i->Pin.Goto.jk);
            vex_printf(",%%r31 ; ");
         }
// CAB: what instead of movl ?
         vex_printf("movl ");
         ppPPC32RI(i->Pin.Goto.dst);
// CAB: eax?
         vex_printf(",%%eax ; ret");
         if (i->Pin.Goto.cond != Pcc_ALWAYS) {
            vex_printf(" }");
         }
         return;
//.. case Xin_CMov32:
//.. vex_printf("cmov%s ", showX86CondCode(i->Xin.CMov32.cond));
//.. ppX86RM(i->Xin.CMov32.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.CMov32.dst);
//.. return;
      case Pin_LoadEX: {
         UChar sz     = i->Pin.LoadEX.sz;
         Bool  syned  = i->Pin.LoadEX.syned;
// CAB: How to get 'update'... ?
         Bool  update = False;
         Bool  idxd   = (i->Pin.LoadEX.src->tag == Pam_RR) ? True : False;
         vex_printf("l%c%c%s%s ",
                    (sz==1) ? 'b' : (sz==2 ? 'h' : 'w'),
                    syned ? 'a' : 'z',
                    update ? "u" : "",
                    idxd ? "x" : "" );
         ppHRegPPC32(i->Pin.LoadEX.dst);
         vex_printf(",");
         ppPPC32AMode(i->Pin.LoadEX.src);
         return;
      }
      case Pin_Store: {
         UChar sz     = i->Pin.Store.sz;
// CAB: How to get 'update'... ?
         Bool  update = False;
         Bool  idxd   = (i->Pin.Store.dst->tag == Pam_RR) ? True : False;
         vex_printf("st%c%s%s ",
                    (sz==1) ? 'b' : (sz==2 ? 'h' : 'w'),
                    update ? "u" : "",
                    idxd ? "x" : "" );
         ppHRegPPC32(i->Pin.Store.src);
         vex_printf(",");
         ppPPC32AMode(i->Pin.Store.dst);
         return;
      }
//.. case Xin_Set32:
//.. vex_printf("setl%s ", showX86CondCode(i->Xin.Set32.cond));
//.. ppHRegX86(i->Xin.Set32.dst);
//.. return;
//.. case Xin_Bsfr32:
//.. vex_printf("bs%cl ", i->Xin.Bsfr32.isFwds ? 'f' : 'r');
//.. ppHRegX86(i->Xin.Bsfr32.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Bsfr32.dst);
//.. return;
//.. case Xin_MFence:
//.. vex_printf("mfence(%s)",
//.. LibVEX_ppVexSubArch(i->Xin.MFence.subarch));
//.. return;
//.. case Xin_FpUnary:
//.. vex_printf("g%sD ", showX86FpOp(i->Xin.FpUnary.op));
//.. ppHRegX86(i->Xin.FpUnary.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.FpUnary.dst);
//.. break;
//.. case Xin_FpBinary:
//.. vex_printf("g%sD ", showX86FpOp(i->Xin.FpBinary.op));
//.. ppHRegX86(i->Xin.FpBinary.srcL);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.FpBinary.srcR);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.FpBinary.dst);
//.. break;
//.. case Xin_FpLdSt:
//.. if (i->Xin.FpLdSt.isLoad) {
//.. vex_printf("gld%c " , i->Xin.FpLdSt.sz==8 ? 'D' : 'F');
//.. ppX86AMode(i->Xin.FpLdSt.addr);
//.. vex_printf(", ");
//.. ppHRegX86(i->Xin.FpLdSt.reg);
//.. } else {
//.. vex_printf("gst%c " , i->Xin.FpLdSt.sz==8 ? 'D' : 'F');
//.. ppHRegX86(i->Xin.FpLdSt.reg);
//.. vex_printf(", ");
//.. ppX86AMode(i->Xin.FpLdSt.addr);
//.. }
//.. return;
//.. case Xin_FpLdStI:
//.. if (i->Xin.FpLdStI.isLoad) {
//.. vex_printf("gild%s ", i->Xin.FpLdStI.sz==8 ? "ll" :
//.. i->Xin.FpLdStI.sz==4 ? "l" : "w");
//.. ppX86AMode(i->Xin.FpLdStI.addr);
//.. vex_printf(", ");
//.. ppHRegX86(i->Xin.FpLdStI.reg);
//.. } else {
//.. vex_printf("gist%s ", i->Xin.FpLdStI.sz==8 ? "ll" :
//.. i->Xin.FpLdStI.sz==4 ? "l" : "w");
//.. ppHRegX86(i->Xin.FpLdStI.reg);
//.. vex_printf(", ");
//.. ppX86AMode(i->Xin.FpLdStI.addr);
//.. }
//.. return;
//.. case Xin_Fp64to32:
//.. vex_printf("gdtof ");
//.. ppHRegX86(i->Xin.Fp64to32.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Fp64to32.dst);
//.. return;
//.. case Xin_FpCMov:
//.. vex_printf("gcmov%s ", showX86CondCode(i->Xin.FpCMov.cond));
//.. ppHRegX86(i->Xin.FpCMov.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.FpCMov.dst);
//.. return;
//.. case Xin_FpLdStCW:
//.. vex_printf(i->Xin.FpLdStCW.isLoad ? "fldcw " : "fstcw ");
//.. ppX86AMode(i->Xin.FpLdStCW.addr);
//.. return;
//.. case Xin_FpStSW_AX:
//.. vex_printf("fstsw %%ax");
//.. return;
//.. case Xin_FpCmp:
//.. vex_printf("gcmp ");
//.. ppHRegX86(i->Xin.FpCmp.srcL);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.FpCmp.srcR);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.FpCmp.dst);
//.. break;
//.. case Xin_SseConst:
//.. vex_printf("const $0x%04x,", (Int)i->Xin.SseConst.con);
//.. ppHRegX86(i->Xin.SseConst.dst);
//.. break;
//.. case Xin_SseLdSt:
//.. vex_printf("movups ");
//.. if (i->Xin.SseLdSt.isLoad) {
//.. ppX86AMode(i->Xin.SseLdSt.addr);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.SseLdSt.reg);
//.. } else {
//.. ppHRegX86(i->Xin.SseLdSt.reg);
//.. vex_printf(",");
//.. ppX86AMode(i->Xin.SseLdSt.addr);
//.. }
//.. return;
//.. case Xin_SseLdzLO:
//.. vex_printf("movs%s ", i->Xin.SseLdzLO.sz==4 ? "s" : "d");
//.. ppX86AMode(i->Xin.SseLdzLO.addr);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.SseLdzLO.reg);
//.. return;
//.. case Xin_Sse32Fx4:
//.. vex_printf("%sps ", showX86SseOp(i->Xin.Sse32Fx4.op));
//.. ppHRegX86(i->Xin.Sse32Fx4.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Sse32Fx4.dst);
//.. return;
//.. case Xin_Sse32FLo:
//.. vex_printf("%sss ", showX86SseOp(i->Xin.Sse32FLo.op));
//.. ppHRegX86(i->Xin.Sse32FLo.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Sse32FLo.dst);
//.. return;
//.. case Xin_Sse64Fx2:
//.. vex_printf("%spd ", showX86SseOp(i->Xin.Sse64Fx2.op));
//.. ppHRegX86(i->Xin.Sse64Fx2.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Sse64Fx2.dst);
//.. return;
//.. case Xin_Sse64FLo:
//.. vex_printf("%ssd ", showX86SseOp(i->Xin.Sse64FLo.op));
//.. ppHRegX86(i->Xin.Sse64FLo.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.Sse64FLo.dst);
//.. return;
//.. case Xin_SseReRg:
//.. vex_printf("%s ", showX86SseOp(i->Xin.SseReRg.op));
//.. ppHRegX86(i->Xin.SseReRg.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.SseReRg.dst);
//.. return;
//.. case Xin_SseCMov:
//.. vex_printf("cmov%s ", showX86CondCode(i->Xin.SseCMov.cond));
//.. ppHRegX86(i->Xin.SseCMov.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.SseCMov.dst);
//.. return;
//.. case Xin_SseShuf:
//.. vex_printf("pshufd $0x%x,", i->Xin.SseShuf.order);
//.. ppHRegX86(i->Xin.SseShuf.src);
//.. vex_printf(",");
//.. ppHRegX86(i->Xin.SseShuf.dst);
//.. return;

      default:
         vpanic("ppPPC32Instr");
   }
}

/* --------- Helpers for register allocation. --------- */

void getRegUsage_PPC32Instr ( HRegUsage* u, PPC32Instr* i )
{
// Bool unary;
   initHRegUsage(u);
   switch (i->tag) {
      case Pin_Alu32:
         addHRegUse(u, HRmRead, i->Pin.Alu32.src1);
         addRegUsage_PPC32RI(u, i->Pin.Alu32.src2);
         if (i->Pin.Alu32.op == Palu_CMP) {
            addHRegUse(u, HRmRead, i->Pin.Alu32.dst);
            return;
         }
         addHRegUse(u, HRmWrite, i->Pin.Alu32.dst);
// CAB TODO: Any circumstance where dst is read & written?
         return;

      case Pin_Sh32:
         addHRegUse(u, HRmWrite, i->Pin.Sh32.dst);
         addHRegUse(u, HRmRead,  i->Pin.Sh32.src);
         addRegUsage_PPC32RI(u, i->Pin.Sh32.shft);
// CAB TODO: Any circumstance where dst is read & written?
         return;

      case Pin_Test32:
         addHRegUse(u, HRmRead, i->Pin.Test32.dst);
         addRegUsage_PPC32RI(u, i->Pin.Test32.src);
         return;
//.. case Xin_Unary32:
//.. addRegUsage_X86RM(u, i->Xin.Unary32.dst, HRmModify);
//.. return;
//.. case Xin_MulL:
//.. addRegUsage_X86RM(u, i->Xin.MulL.src, HRmRead);
//.. addHRegUse(u, HRmModify, hregX86_EAX());
//.. addHRegUse(u, HRmWrite, hregX86_EDX());
//.. return;
//.. case Xin_Div:
//.. addRegUsage_X86RM(u, i->Xin.Div.src, HRmRead);
//.. addHRegUse(u, HRmModify, hregX86_EAX());
//.. addHRegUse(u, HRmModify, hregX86_EDX());
//.. return;
//.. case Xin_Sh3232:
//.. addHRegUse(u, HRmRead, i->Xin.Sh3232.src);
//.. addHRegUse(u, HRmModify, i->Xin.Sh3232.dst);
//.. if (i->Xin.Sh3232.amt == 0)
//.. addHRegUse(u, HRmRead, hregX86_ECX());
//.. return;
//.. case Xin_Push:
//.. addRegUsage_X86RMI(u, i->Xin.Push.src);
//.. addHRegUse(u, HRmModify, hregX86_ESP());
//.. return;
      case Pin_Call:
         /* This is a bit subtle. */
         /* First off, claim it trashes all the caller-saved regs
            which fall within the register allocator's jurisdiction.
            These I believe to be: r0,r3:12
         */
         addHRegUse(u, HRmWrite, hregPPC32_GPR0());
         addHRegUse(u, HRmWrite, hregPPC32_GPR3());
         addHRegUse(u, HRmWrite, hregPPC32_GPR4());
         addHRegUse(u, HRmWrite, hregPPC32_GPR5());
         addHRegUse(u, HRmWrite, hregPPC32_GPR6());
         addHRegUse(u, HRmWrite, hregPPC32_GPR7());
         addHRegUse(u, HRmWrite, hregPPC32_GPR8());
         addHRegUse(u, HRmWrite, hregPPC32_GPR9());
         addHRegUse(u, HRmWrite, hregPPC32_GPR10());
         addHRegUse(u, HRmWrite, hregPPC32_GPR11());
         addHRegUse(u, HRmWrite, hregPPC32_GPR12());

         /* Now we have to state any parameter-carrying registers
            which might be read.  This depends on the regparmness. */
         switch (i->Pin.Call.regparms) {
            case 8: addHRegUse(u, HRmRead, hregPPC32_GPR10()); /*fallthru*/
            case 7: addHRegUse(u, HRmRead, hregPPC32_GPR9() ); /*fallthru*/
            case 6: addHRegUse(u, HRmRead, hregPPC32_GPR8() ); /*fallthru*/
            case 5: addHRegUse(u, HRmRead, hregPPC32_GPR7() ); /*fallthru*/
            case 4: addHRegUse(u, HRmRead, hregPPC32_GPR6() ); /*fallthru*/
            case 3: addHRegUse(u, HRmRead, hregPPC32_GPR5() ); /*fallthru*/
            case 2: addHRegUse(u, HRmRead, hregPPC32_GPR4() ); /*fallthru*/
            case 1: addHRegUse(u, HRmRead, hregPPC32_GPR3() ); /*fallthru*/
            case 0: break;
            default: vpanic("getRegUsage_PPC32Instr:Call:regparms");
         }
         /* Finally, there is the issue that the insn trashes a
            register because the literal target address has to be
            loaded into a register.  Fortunately, ?CAB? is stated in the
            ABI as a scratch register, and so seems a suitable victim. */
         addHRegUse(u, HRmWrite, hregPPC32_GPR3());
         /* Upshot of this is that the assembler really must use ?CAB?,
            and no other, as a destination temporary. */
         return;
      case Pin_Goto:
         addRegUsage_PPC32RI(u, i->Pin.Goto.dst);
         addHRegUse(u, HRmWrite, hregPPC32_GPR4());
         if (i->Pin.Goto.jk != Ijk_Boring)
            addHRegUse(u, HRmWrite, GuestStatePtr);
         return;
//.. case Xin_CMov32:
//.. addRegUsage_X86RM(u, i->Xin.CMov32.src, HRmRead);
//.. addHRegUse(u, HRmModify, i->Xin.CMov32.dst);
//.. return;
      case Pin_LoadEX:
         addRegUsage_PPC32AMode(u, i->Pin.LoadEX.src);
         addHRegUse(u, HRmWrite, i->Pin.LoadEX.dst);
         return;
      case Pin_Store:
         addHRegUse(u, HRmRead, i->Pin.Store.src);
         addRegUsage_PPC32AMode(u, i->Pin.Store.dst);
         return;
//.. case Xin_Set32:
//.. addHRegUse(u, HRmWrite, i->Xin.Set32.dst);
//.. return;
//.. case Xin_Bsfr32:
//.. addHRegUse(u, HRmRead, i->Xin.Bsfr32.src);
//.. addHRegUse(u, HRmWrite, i->Xin.Bsfr32.dst);
//.. return;
//.. case Xin_MFence:
//.. return;
//.. case Xin_FpUnary:
//.. addHRegUse(u, HRmRead, i->Xin.FpUnary.src);
//.. addHRegUse(u, HRmWrite, i->Xin.FpUnary.dst);
//.. return;
//.. case Xin_FpBinary:
//.. addHRegUse(u, HRmRead, i->Xin.FpBinary.srcL);
//.. addHRegUse(u, HRmRead, i->Xin.FpBinary.srcR);
//.. addHRegUse(u, HRmWrite, i->Xin.FpBinary.dst);
//.. return;
//.. case Xin_FpLdSt:
//.. addRegUsage_X86AMode(u, i->Xin.FpLdSt.addr);
//.. addHRegUse(u, i->Xin.FpLdSt.isLoad ? HRmWrite : HRmRead,
//.. i->Xin.FpLdSt.reg);
//.. return;
//.. case Xin_FpLdStI:
//.. addRegUsage_X86AMode(u, i->Xin.FpLdStI.addr);
//.. addHRegUse(u, i->Xin.FpLdStI.isLoad ? HRmWrite : HRmRead,
//.. i->Xin.FpLdStI.reg);
//.. return;
//.. case Xin_Fp64to32:
//.. addHRegUse(u, HRmRead, i->Xin.Fp64to32.src);
//.. addHRegUse(u, HRmWrite, i->Xin.Fp64to32.dst);
//.. return;
//.. case Xin_FpCMov:
//.. addHRegUse(u, HRmRead, i->Xin.FpCMov.src);
//.. addHRegUse(u, HRmModify, i->Xin.FpCMov.dst);
//.. return;
//.. case Xin_FpLdStCW:
//.. addRegUsage_X86AMode(u, i->Xin.FpLdStCW.addr);
//.. return;
//.. case Xin_FpStSW_AX:
//.. addHRegUse(u, HRmWrite, hregX86_EAX());
//.. return;
//.. case Xin_FpCmp:
//.. addHRegUse(u, HRmRead, i->Xin.FpCmp.srcL);
//.. addHRegUse(u, HRmRead, i->Xin.FpCmp.srcR);
//.. addHRegUse(u, HRmWrite, i->Xin.FpCmp.dst);
//.. addHRegUse(u, HRmWrite, hregX86_EAX());
//.. return;
//.. case Xin_SseLdSt:
//.. addRegUsage_X86AMode(u, i->Xin.SseLdSt.addr);
//.. addHRegUse(u, i->Xin.SseLdSt.isLoad ? HRmWrite : HRmRead,
//.. i->Xin.SseLdSt.reg);
//.. return;
//.. case Xin_SseLdzLO:
//.. addRegUsage_X86AMode(u, i->Xin.SseLdzLO.addr);
//.. addHRegUse(u, HRmWrite, i->Xin.SseLdzLO.reg);
//.. return;
//.. case Xin_SseConst:
//.. addHRegUse(u, HRmWrite, i->Xin.SseConst.dst);
//.. return;
//.. case Xin_Sse32Fx4:
//.. vassert(i->Xin.Sse32Fx4.op != Xsse_MOV);
//.. unary = i->Xin.Sse32Fx4.op == Xsse_RCPF
//.. || i->Xin.Sse32Fx4.op == Xsse_RSQRTF
//.. || i->Xin.Sse32Fx4.op == Xsse_SQRTF;
//.. addHRegUse(u, HRmRead, i->Xin.Sse32Fx4.src);
//.. addHRegUse(u, unary ? HRmWrite : HRmModify,
//.. i->Xin.Sse32Fx4.dst);
//.. return;
//.. case Xin_Sse32FLo:
//.. vassert(i->Xin.Sse32FLo.op != Xsse_MOV);
//.. unary = i->Xin.Sse32FLo.op == Xsse_RCPF
//.. || i->Xin.Sse32FLo.op == Xsse_RSQRTF
//.. || i->Xin.Sse32FLo.op == Xsse_SQRTF;
//.. addHRegUse(u, HRmRead, i->Xin.Sse32FLo.src);
//.. addHRegUse(u, unary ? HRmWrite : HRmModify,
//.. i->Xin.Sse32FLo.dst);
//.. return;
//.. case Xin_Sse64Fx2:
//.. vassert(i->Xin.Sse64Fx2.op != Xsse_MOV);
//.. unary = i->Xin.Sse64Fx2.op == Xsse_RCPF
//.. || i->Xin.Sse64Fx2.op == Xsse_RSQRTF
//.. || i->Xin.Sse64Fx2.op == Xsse_SQRTF;
//.. addHRegUse(u, HRmRead, i->Xin.Sse64Fx2.src);
//.. addHRegUse(u, unary ? HRmWrite : HRmModify,
//.. i->Xin.Sse64Fx2.dst);
//.. return;
//.. case Xin_Sse64FLo:
//.. vassert(i->Xin.Sse64FLo.op != Xsse_MOV);
//.. unary = i->Xin.Sse64FLo.op == Xsse_RCPF
//.. || i->Xin.Sse64FLo.op == Xsse_RSQRTF
//.. || i->Xin.Sse64FLo.op == Xsse_SQRTF;
//.. addHRegUse(u, HRmRead, i->Xin.Sse64FLo.src);
//.. addHRegUse(u, unary ? HRmWrite : HRmModify,
//.. i->Xin.Sse64FLo.dst);
//.. return;
//.. case Xin_SseReRg:
//.. if (i->Xin.SseReRg.op == Xsse_XOR
//.. && i->Xin.SseReRg.src == i->Xin.SseReRg.dst) {
//.. /* reg-alloc needs to understand 'xor r,r' as a write of r */
//.. /* (as opposed to a rite of passage :-) */
//.. addHRegUse(u, HRmWrite, i->Xin.SseReRg.dst);
//.. } else {
//.. addHRegUse(u, HRmRead, i->Xin.SseReRg.src);
//.. addHRegUse(u, i->Xin.SseReRg.op == Xsse_MOV
//.. ? HRmWrite : HRmModify,
//.. i->Xin.SseReRg.dst);
//.. }
//.. return;
//.. case Xin_SseCMov:
//.. addHRegUse(u, HRmRead, i->Xin.SseCMov.src);
//.. addHRegUse(u, HRmModify, i->Xin.SseCMov.dst);
//.. return;
//.. case Xin_SseShuf:
//.. addHRegUse(u, HRmRead, i->Xin.SseShuf.src);
//.. addHRegUse(u, HRmWrite, i->Xin.SseShuf.dst);
//.. return;
      default:
         ppPPC32Instr(i);
         vpanic("getRegUsage_PPC32Instr");
   }
}
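
/* Illustrative sketch only (not part of the build): how the register
   allocator might query an instruction's register usage.  ppHRegUsage
   is assumed to be available from h_generic_regs.h. */
#if 0
static void example_getRegUsage ( void )
{
   HRegUsage   u;
   PPC32Instr* i = PPC32Instr_Alu32( Palu_ADD, hregPPC32_GPR3(),
                                     hregPPC32_GPR4(), PPC32RI_Imm(1) );
   getRegUsage_PPC32Instr( &u, i );
   ppHRegUsage( &u );
}
#endif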

/* local helper */
static void mapReg(HRegRemap* m, HReg* r)
{
   *r = lookupHRegRemap(m, *r);
}

void mapRegs_PPC32Instr (HRegRemap* m, PPC32Instr* i)
{
   switch (i->tag) {
      case Pin_Alu32:
         mapReg(m, &i->Pin.Alu32.dst);
         mapReg(m, &i->Pin.Alu32.src1);
         mapRegs_PPC32RI(m, i->Pin.Alu32.src2);
         return;
      case Pin_Sh32:
         mapReg(m, &i->Pin.Sh32.dst);
         mapReg(m, &i->Pin.Sh32.src);
         mapRegs_PPC32RI(m, i->Pin.Sh32.shft);
         return;
      case Pin_Test32:
         mapReg(m, &i->Pin.Test32.dst);
         mapRegs_PPC32RI(m, i->Pin.Test32.src);
         return;
//.. case Xin_Unary32:
//.. mapRegs_X86RM(m, i->Xin.Unary32.dst);
//.. return;
//.. case Xin_MulL:
//.. mapRegs_X86RM(m, i->Xin.MulL.src);
//.. return;
//.. case Xin_Div:
//.. mapRegs_X86RM(m, i->Xin.Div.src);
//.. return;
//.. case Xin_Sh3232:
//.. mapReg(m, &i->Xin.Sh3232.src);
//.. mapReg(m, &i->Xin.Sh3232.dst);
//.. return;
//.. case Xin_Push:
//.. mapRegs_X86RMI(m, i->Xin.Push.src);
//.. return;
      case Pin_Call:
         return;
      case Pin_Goto:
         mapRegs_PPC32RI(m, i->Pin.Goto.dst);
         return;
//.. case Xin_CMov32:
//.. mapRegs_X86RM(m, i->Xin.CMov32.src);
//.. mapReg(m, &i->Xin.CMov32.dst);
//.. return;
      case Pin_LoadEX:
         mapRegs_PPC32AMode(m, i->Pin.LoadEX.src);
         mapReg(m, &i->Pin.LoadEX.dst);
         return;
      case Pin_Store:
         mapReg(m, &i->Pin.Store.src);
         mapRegs_PPC32AMode(m, i->Pin.Store.dst);
         return;
//.. case Xin_Set32:
//.. mapReg(m, &i->Xin.Set32.dst);
//.. return;
//.. case Xin_Bsfr32:
//.. mapReg(m, &i->Xin.Bsfr32.src);
//.. mapReg(m, &i->Xin.Bsfr32.dst);
//.. return;
//.. case Xin_MFence:
//.. return;
//.. case Xin_FpUnary:
//.. mapReg(m, &i->Xin.FpUnary.src);
//.. mapReg(m, &i->Xin.FpUnary.dst);
//.. return;
//.. case Xin_FpBinary:
//.. mapReg(m, &i->Xin.FpBinary.srcL);
//.. mapReg(m, &i->Xin.FpBinary.srcR);
//.. mapReg(m, &i->Xin.FpBinary.dst);
//.. return;
//.. case Xin_FpLdSt:
//.. mapRegs_X86AMode(m, i->Xin.FpLdSt.addr);
//.. mapReg(m, &i->Xin.FpLdSt.reg);
//.. return;
//.. case Xin_FpLdStI:
//.. mapRegs_X86AMode(m, i->Xin.FpLdStI.addr);
//.. mapReg(m, &i->Xin.FpLdStI.reg);
//.. return;
//.. case Xin_Fp64to32:
//.. mapReg(m, &i->Xin.Fp64to32.src);
//.. mapReg(m, &i->Xin.Fp64to32.dst);
//.. return;
//.. case Xin_FpCMov:
//.. mapReg(m, &i->Xin.FpCMov.src);
//.. mapReg(m, &i->Xin.FpCMov.dst);
//.. return;
//.. case Xin_FpLdStCW:
//.. mapRegs_X86AMode(m, i->Xin.FpLdStCW.addr);
//.. return;
//.. case Xin_FpStSW_AX:
//.. return;
//.. case Xin_FpCmp:
//.. mapReg(m, &i->Xin.FpCmp.srcL);
//.. mapReg(m, &i->Xin.FpCmp.srcR);
//.. mapReg(m, &i->Xin.FpCmp.dst);
//.. return;
//.. case Xin_SseConst:
//.. mapReg(m, &i->Xin.SseConst.dst);
//.. return;
//.. case Xin_SseLdSt:
//.. mapReg(m, &i->Xin.SseLdSt.reg);
//.. mapRegs_X86AMode(m, i->Xin.SseLdSt.addr);
//.. break;
//.. case Xin_SseLdzLO:
//.. mapReg(m, &i->Xin.SseLdzLO.reg);
//.. mapRegs_X86AMode(m, i->Xin.SseLdzLO.addr);
//.. break;
//.. case Xin_Sse32Fx4:
//.. mapReg(m, &i->Xin.Sse32Fx4.src);
//.. mapReg(m, &i->Xin.Sse32Fx4.dst);
//.. return;
//.. case Xin_Sse32FLo:
//.. mapReg(m, &i->Xin.Sse32FLo.src);
//.. mapReg(m, &i->Xin.Sse32FLo.dst);
//.. return;
//.. case Xin_Sse64Fx2:
//.. mapReg(m, &i->Xin.Sse64Fx2.src);
//.. mapReg(m, &i->Xin.Sse64Fx2.dst);
//.. return;
//.. case Xin_Sse64FLo:
//.. mapReg(m, &i->Xin.Sse64FLo.src);
//.. mapReg(m, &i->Xin.Sse64FLo.dst);
//.. return;
//.. case Xin_SseReRg:
//.. mapReg(m, &i->Xin.SseReRg.src);
//.. mapReg(m, &i->Xin.SseReRg.dst);
//.. return;
//.. case Xin_SseCMov:
//.. mapReg(m, &i->Xin.SseCMov.src);
//.. mapReg(m, &i->Xin.SseCMov.dst);
//.. return;
//.. case Xin_SseShuf:
//.. mapReg(m, &i->Xin.SseShuf.src);
//.. mapReg(m, &i->Xin.SseShuf.dst);
//.. return;
      default:
         ppPPC32Instr(i);
         vpanic("mapRegs_PPC32Instr");
   }
}

/* Figure out if i represents a reg-reg move, and if so assign the
   source and destination to *src and *dst.  If in doubt say No.  Used
   by the register allocator to do move coalescing.
*/
Bool isMove_PPC32Instr ( PPC32Instr* i, HReg* src, HReg* dst )
{
//.. /* Moves between integer regs */
//.. if (i->tag == Xin_Alu32R) {
//.. if (i->Xin.Alu32R.op != Xalu_MOV)
//.. return False;
//.. if (i->Xin.Alu32R.src->tag != Xrmi_Reg)
//.. return False;
//.. *src = i->Xin.Alu32R.src->Xrmi.Reg.reg;
//.. *dst = i->Xin.Alu32R.dst;
//.. return True;
//.. }
//.. /* Moves between FP regs */
//.. if (i->tag == Xin_FpUnary) {
//.. if (i->Xin.FpUnary.op != Xfp_MOV)
//.. return False;
//.. *src = i->Xin.FpUnary.src;
//.. *dst = i->Xin.FpUnary.dst;
//.. return True;
//.. }
//.. if (i->tag == Xin_SseReRg) {
//.. if (i->Xin.SseReRg.op != Xsse_MOV)
//.. return False;
//.. *src = i->Xin.SseReRg.src;
//.. *dst = i->Xin.SseReRg.dst;
//.. return True;
//.. }
   return False;
}
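
/* Illustrative sketch only, not the author's implementation: once
   reg-reg moves are emitted as "or rD,rS,rS" (Pin_Alu32, Palu_OR,
   identical source operands), isMove_PPC32Instr could plausibly
   recognise them like this, enabling move coalescing:

      if (i->tag == Pin_Alu32
          && i->Pin.Alu32.op == Palu_OR
          && i->Pin.Alu32.src2->tag == Pri_Reg
          && i->Pin.Alu32.src2->Pri.Reg.reg == i->Pin.Alu32.src1) {
         *src = i->Pin.Alu32.src1;
         *dst = i->Pin.Alu32.dst;
         return True;
      }
*/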


/* Generate PPC32 spill/reload instructions under the direction of the
   register allocator.  Note it's critical these don't write the
   condition codes. */

PPC32Instr* genSpill_PPC32 ( HReg rreg, Int offsetB )
{ vassert(0);
//.. X86AMode* am;
//.. vassert(offsetB >= 0);
//.. vassert(!hregIsVirtual(rreg));
//.. am = X86AMode_IR(offsetB, hregX86_EBP());
//..
//.. switch (hregClass(rreg)) {
//.. case HRcInt32:
//.. return X86Instr_Alu32M ( Xalu_MOV, X86RI_Reg(rreg), am );
//.. case HRcFlt64:
//.. return X86Instr_FpLdSt ( False/*store*/, 8, rreg, am );
//.. case HRcVec128:
//.. return X86Instr_SseLdSt ( False/*store*/, rreg, am );
//.. default:
//.. ppHRegClass(hregClass(rreg));
//.. vpanic("genSpill_X86: unimplemented regclass");
//.. }
}

PPC32Instr* genReload_PPC32 ( HReg rreg, Int offsetB )
{ vassert(0);
//.. X86AMode* am;
//.. vassert(offsetB >= 0);
//.. vassert(!hregIsVirtual(rreg));
//.. am = X86AMode_IR(offsetB, hregX86_EBP());
//.. switch (hregClass(rreg)) {
//.. case HRcInt32:
//.. return X86Instr_Alu32R ( Xalu_MOV, X86RMI_Mem(am), rreg );
//.. case HRcFlt64:
//.. return X86Instr_FpLdSt ( True/*load*/, 8, rreg, am );
//.. case HRcVec128:
//.. return X86Instr_SseLdSt ( True/*load*/, rreg, am );
//.. default:
//.. ppHRegClass(hregClass(rreg));
//.. vpanic("genReload_X86: unimplemented regclass");
//.. }
}
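
/* Illustrative sketch only, not the author's implementation: by
   analogy with the commented-out x86 version above, a PPC32 integer
   spill/reload would presumably be a store/load at offsetB from the
   guest state pointer, e.g.

      PPC32AMode* am = PPC32AMode_IR( offsetB, GuestStatePtr );
      return PPC32Instr_Store ( 4, am, rreg );           (spill)
      return PPC32Instr_LoadEX( 4, False, rreg, am );    (reload)

   with separate cases needed for HRcFlt64 once FP load/store
   instructions exist. */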
1397
1398
cerioncd304492005-02-08 19:40:24 +00001399/* --------- The x86 assembler (bleh.) --------- */
1400
1401#if 0
1402static UInt iregNo ( HReg r )
1403{
1404 UInt n;
1405 vassert(hregClass(r) == HRcInt32);
1406 vassert(!hregIsVirtual(r));
1407 n = hregNumber(r);
1408 vassert(n <= 32);
1409 return n;
1410}
1411#endif
1412
cerionbcf8c3e2005-02-04 16:17:07 +00001413//.. static UInt fregNo ( HReg r )
1414//.. {
1415//.. UInt n;
1416//.. vassert(hregClass(r) == HRcFlt64);
1417//.. vassert(!hregIsVirtual(r));
1418//.. n = hregNumber(r);
1419//.. vassert(n <= 5);
1420//.. return n;
1421//.. }
cerioncd304492005-02-08 19:40:24 +00001422
cerionbcf8c3e2005-02-04 16:17:07 +00001423//.. static UInt vregNo ( HReg r )
1424//.. {
1425//.. UInt n;
1426//.. vassert(hregClass(r) == HRcVec128);
1427//.. vassert(!hregIsVirtual(r));
1428//.. n = hregNumber(r);
1429//.. vassert(n <= 7);
1430//.. return n;
1431//.. }
cerioncd304492005-02-08 19:40:24 +00001432
cerionbcf8c3e2005-02-04 16:17:07 +00001433//.. static UChar mkModRegRM ( UChar mod, UChar reg, UChar regmem )
1434//.. {
1435//.. return ((mod & 3) << 6) | ((reg & 7) << 3) | (regmem & 7);
1436//.. }
cerioncd304492005-02-08 19:40:24 +00001437
cerionbcf8c3e2005-02-04 16:17:07 +00001438//.. static UChar mkSIB ( Int shift, Int regindex, Int regbase )
1439//.. {
1440//.. return ((shift & 3) << 6) | ((regindex & 7) << 3) | (regbase & 7);
1441//.. }
cerioncd304492005-02-08 19:40:24 +00001442
cerionbcf8c3e2005-02-04 16:17:07 +00001443//.. static UChar* emit32 ( UChar* p, UInt w32 )
1444//.. {
1445//.. *p++ = (w32) & 0x000000FF;
1446//.. *p++ = (w32 >> 8) & 0x000000FF;
1447//.. *p++ = (w32 >> 16) & 0x000000FF;
1448//.. *p++ = (w32 >> 24) & 0x000000FF;
1449//.. return p;
1450//.. }
cerioncd304492005-02-08 19:40:24 +00001451
cerionbcf8c3e2005-02-04 16:17:07 +00001452//.. /* Does a sign-extend of the lowest 8 bits give
1453//.. the original number? */
1454//.. static Bool fits8bits ( UInt w32 )
1455//.. {
1456//.. Int i32 = (Int)w32;
1457//.. return i32 == ((i32 << 24) >> 24);
1458//.. }
cerioncd304492005-02-08 19:40:24 +00001459
1460
cerionbcf8c3e2005-02-04 16:17:07 +00001461//.. /* Forming mod-reg-rm bytes and scale-index-base bytes.
1462//..
1463//.. greg, 0(ereg) | ereg != ESP && ereg != EBP
1464//.. = 00 greg ereg
1465//..
1466//.. greg, d8(ereg) | ereg != ESP
1467//.. = 01 greg ereg, d8
1468//..
1469//.. greg, d32(ereg) | ereg != ESP
1470//.. = 10 greg ereg, d32
1471//..
1472//.. greg, d8(%esp) = 01 greg 100, 0x24, d8
1473//..
1474//.. -----------------------------------------------
1475//..
1476//.. greg, d8(base,index,scale)
1477//.. | index != ESP
1478//.. = 01 greg 100, scale index base, d8
1479//..
1480//.. greg, d32(base,index,scale)
1481//.. | index != ESP
1482//.. = 10 greg 100, scale index base, d32
1483//.. */
1484//.. static UChar* doAMode_M ( UChar* p, HReg greg, X86AMode* am )
1485//.. {
1486//.. if (am->tag == Xam_IR) {
1487//.. if (am->Xam.IR.imm == 0
1488//.. && am->Xam.IR.reg != hregX86_ESP()
1489//.. && am->Xam.IR.reg != hregX86_EBP() ) {
1490//.. *p++ = mkModRegRM(0, iregNo(greg), iregNo(am->Xam.IR.reg));
1491//.. return p;
1492//.. }
1493//.. if (fits8bits(am->Xam.IR.imm)
1494//.. && am->Xam.IR.reg != hregX86_ESP()) {
1495//.. *p++ = mkModRegRM(1, iregNo(greg), iregNo(am->Xam.IR.reg));
1496//.. *p++ = am->Xam.IR.imm & 0xFF;
1497//.. return p;
1498//.. }
1499//.. if (am->Xam.IR.reg != hregX86_ESP()) {
1500//.. *p++ = mkModRegRM(2, iregNo(greg), iregNo(am->Xam.IR.reg));
1501//.. p = emit32(p, am->Xam.IR.imm);
1502//.. return p;
1503//.. }
1504//.. if (am->Xam.IR.reg == hregX86_ESP()
1505//.. && fits8bits(am->Xam.IR.imm)) {
1506//.. *p++ = mkModRegRM(1, iregNo(greg), 4);
1507//.. *p++ = 0x24;
1508//.. *p++ = am->Xam.IR.imm & 0xFF;
1509//.. return p;
1510//.. }
1511//.. ppX86AMode(am);
1512//.. vpanic("doAMode_M: can't emit amode IR");
1513//.. /*NOTREACHED*/
1514//.. }
1515//.. if (am->tag == Xam_IRRS) {
1516//.. if (fits8bits(am->Xam.IRRS.imm)
1517//.. && am->Xam.IRRS.index != hregX86_ESP()) {
1518//.. *p++ = mkModRegRM(1, iregNo(greg), 4);
1519//.. *p++ = mkSIB(am->Xam.IRRS.shift, am->Xam.IRRS.index,
1520//.. am->Xam.IRRS.base);
1521//.. *p++ = am->Xam.IRRS.imm & 0xFF;
1522//.. return p;
1523//.. }
1524//.. if (am->Xam.IRRS.index != hregX86_ESP()) {
1525//.. *p++ = mkModRegRM(2, iregNo(greg), 4);
1526//.. *p++ = mkSIB(am->Xam.IRRS.shift, am->Xam.IRRS.index,
1527//.. am->Xam.IRRS.base);
1528//.. p = emit32(p, am->Xam.IRRS.imm);
1529//.. return p;
1530//.. }
1531//.. ppX86AMode(am);
1532//.. vpanic("doAMode_M: can't emit amode IRRS");
1533//.. /*NOTREACHED*/
1534//.. }
1535//.. vpanic("doAMode_M: unknown amode");
1536//.. /*NOTREACHED*/
1537//.. }
cerioncd304492005-02-08 19:40:24 +00001538
1539
cerionbcf8c3e2005-02-04 16:17:07 +00001540//.. /* Emit a mod-reg-rm byte when the rm bit denotes a reg. */
1541//.. static UChar* doAMode_R ( UChar* p, HReg greg, HReg ereg )
1542//.. {
1543//.. *p++ = mkModRegRM(3, iregNo(greg), iregNo(ereg));
1544//.. return p;
1545//.. }
cerioncd304492005-02-08 19:40:24 +00001546
1547
cerionbcf8c3e2005-02-04 16:17:07 +00001548//.. /* Emit ffree %st(7) */
1549//.. static UChar* do_ffree_st7 ( UChar* p )
1550//.. {
1551//.. *p++ = 0xDD;
1552//.. *p++ = 0xC7;
1553//.. return p;
1554//.. }
cerioncd304492005-02-08 19:40:24 +00001555
cerionbcf8c3e2005-02-04 16:17:07 +00001556//.. /* Emit fstp %st(i), 1 <= i <= 7 */
1557//.. static UChar* do_fstp_st ( UChar* p, Int i )
1558//.. {
1559//.. vassert(1 <= i && i <= 7);
1560//.. *p++ = 0xDD;
1561//.. *p++ = 0xD8+i;
1562//.. return p;
1563//.. }
cerioncd304492005-02-08 19:40:24 +00001564
cerionbcf8c3e2005-02-04 16:17:07 +00001565//.. /* Emit fld %st(i), 0 <= i <= 6 */
1566//.. static UChar* do_fld_st ( UChar* p, Int i )
1567//.. {
1568//.. vassert(0 <= i && i <= 6);
1569//.. *p++ = 0xD9;
1570//.. *p++ = 0xC0+i;
1571//.. return p;
1572//.. }
1573
1574//.. /* Emit f<op> %st(0) */
1575//.. static UChar* do_fop1_st ( UChar* p, X86FpOp op )
1576//.. {
1577//.. switch (op) {
1578//.. case Xfp_NEG: *p++ = 0xD9; *p++ = 0xE0; break;
1579//.. case Xfp_ABS: *p++ = 0xD9; *p++ = 0xE1; break;
1580//.. case Xfp_SQRT: *p++ = 0xD9; *p++ = 0xFA; break;
1581//.. case Xfp_ROUND: *p++ = 0xD9; *p++ = 0xFC; break;
1582//.. case Xfp_SIN: *p++ = 0xD9; *p++ = 0xFE; break;
1583//.. case Xfp_COS: *p++ = 0xD9; *p++ = 0xFF; break;
1584//.. case Xfp_2XM1: *p++ = 0xD9; *p++ = 0xF0; break;
1585//.. case Xfp_MOV: break;
1586//.. case Xfp_TAN: p = do_ffree_st7(p); /* since fptan pushes 1.0 */
1587//.. *p++ = 0xD9; *p++ = 0xF2; /* fptan */
1588//.. *p++ = 0xD9; *p++ = 0xF7; /* fincstp */
1589//.. break;
1590//.. default: vpanic("do_fop1_st: unknown op");
1591//.. }
1592//.. return p;
1593//.. }
1594
1595//.. /* Emit f<op> %st(i), 1 <= i <= 5 */
1596//.. static UChar* do_fop2_st ( UChar* p, X86FpOp op, Int i )
1597//.. {
1598//.. # define fake(_n) mkHReg((_n), HRcInt32, False)
1599//.. Int subopc;
1600//.. switch (op) {
1601//.. case Xfp_ADD: subopc = 0; break;
1602//.. case Xfp_SUB: subopc = 4; break;
1603//.. case Xfp_MUL: subopc = 1; break;
1604//.. case Xfp_DIV: subopc = 6; break;
1605//.. default: vpanic("do_fop2_st: unknown op");
1606//.. }
1607//.. *p++ = 0xD8;
1608//.. p = doAMode_R(p, fake(subopc), fake(i));
1609//.. return p;
1610//.. # undef fake
1611//.. }
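/* Illustrative note: the 'fake' HRegs here carry bit-fields, not real
   registers.  For instance do_fop2_st(p, Xfp_ADD, 3) would emit the two
   bytes D8 C3 -- opcode 0xD8, then mod=11, reg=sub-opcode 0 (ADD), rm=3 --
   i.e. "fadd %st(3)", adding %st(3) into %st(0). */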
1612
1613//.. /* Push a 32-bit word on the stack. The word depends on tags[3:0];
1614//.. each byte is either 0x00 or 0xFF depending on the corresponding bit in tags[].
1615//.. */
1616//.. static UChar* push_word_from_tags ( UChar* p, UShort tags )
1617//.. {
1618//.. UInt w;
1619//.. vassert(0 == (tags & ~0xF));
1620//.. if (tags == 0) {
1621//.. /* pushl $0x00000000 */
1622//.. *p++ = 0x6A;
1623//.. *p++ = 0x00;
1624//.. }
1625//.. else
1626//.. /* pushl $0xFFFFFFFF */
1627//.. if (tags == 0xF) {
1628//.. *p++ = 0x6A;
1629//.. *p++ = 0xFF;
1630//.. } else {
1631//.. vassert(0); /* awaiting test case */
1632//.. w = 0;
1633//.. if (tags & 1) w |= 0x000000FF;
1634//.. if (tags & 2) w |= 0x0000FF00;
1635//.. if (tags & 4) w |= 0x00FF0000;
1636//.. if (tags & 8) w |= 0xFF000000;
1637//.. *p++ = 0x68;
1638//.. p = emit32(p, w);
1639//.. }
1640//.. return p;
1641//.. }
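/* Illustrative note: only tags values 0x0 and 0xF take the short
   sign-extended "pushl $imm8" form (opcode 0x6A); any other value, e.g.
   tags = 0x5, would expand bit-by-bit to a full word (0x00FF00FF in that
   case) and be pushed with the general "pushl $imm32" form (opcode 0x68). */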
1642
1643/* Emit an instruction into buf and return the number of bytes used.
1644 Note that buf is not the insn's final place, and therefore it is
1645 imperative to emit position-independent code. */
1646
1647Int emit_PPC32Instr ( UChar* buf, Int nbuf, PPC32Instr* i )
1648{
1649//.. UInt irno, opc, opc_rr, subopc_imm, opc_imma, opc_cl, opc_imm, subopc;
1650//..
1651//.. UInt xtra;
1652 UChar* p = &buf[0];
1653//.. UChar* ptmp;
1654//.. vassert(nbuf >= 32);
1655//..
1656//.. /* Wrap an integer as a int register, for use assembling
1657//.. GrpN insns, in which the greg field is used as a sub-opcode
1658//.. and does not really contain a register. */
1659//.. # define fake(_n) mkHReg((_n), HRcInt32, False)
1660
1661 vex_printf("asm "); ppPPC32Instr(i); vex_printf("\n");   /* debug: trace each insn as it is emitted */
1662
1663 switch (i->tag) {
1664
1665//.. case Xin_Alu32R:
1666//.. /* Deal specially with MOV */
1667//.. if (i->Xin.Alu32R.op == Xalu_MOV) {
1668//.. switch (i->Xin.Alu32R.src->tag) {
1669//.. case Xrmi_Imm:
1670//.. *p++ = 0xB8 + iregNo(i->Xin.Alu32R.dst);
1671//.. p = emit32(p, i->Xin.Alu32R.src->Xrmi.Imm.imm32);
1672//.. goto done;
1673//.. case Xrmi_Reg:
1674//.. *p++ = 0x89;
1675//.. p = doAMode_R(p, i->Xin.Alu32R.src->Xrmi.Reg.reg,
1676//.. i->Xin.Alu32R.dst);
1677//.. goto done;
1678//.. case Xrmi_Mem:
1679//.. *p++ = 0x8B;
1680//.. p = doAMode_M(p, i->Xin.Alu32R.dst,
1681//.. i->Xin.Alu32R.src->Xrmi.Mem.am);
1682//.. goto done;
1683//.. default:
1684//.. goto bad;
1685//.. }
1686//.. }
1687//.. /* MUL */
1688//.. if (i->Xin.Alu32R.op == Xalu_MUL) {
1689//.. switch (i->Xin.Alu32R.src->tag) {
1690//.. case Xrmi_Reg:
1691//.. *p++ = 0x0F;
1692//.. *p++ = 0xAF;
1693//.. p = doAMode_R(p, i->Xin.Alu32R.dst,
1694//.. i->Xin.Alu32R.src->Xrmi.Reg.reg);
1695//.. goto done;
1696//.. case Xrmi_Mem:
1697//.. *p++ = 0x0F;
1698//.. *p++ = 0xAF;
1699//.. p = doAMode_M(p, i->Xin.Alu32R.dst,
1700//.. i->Xin.Alu32R.src->Xrmi.Mem.am);
1701//.. goto done;
1702//.. case Xrmi_Imm:
1703//.. if (fits8bits(i->Xin.Alu32R.src->Xrmi.Imm.imm32)) {
1704//.. *p++ = 0x6B;
1705//.. p = doAMode_R(p, i->Xin.Alu32R.dst, i->Xin.Alu32R.dst);
1706//.. *p++ = 0xFF & i->Xin.Alu32R.src->Xrmi.Imm.imm32;
1707//.. } else {
1708//.. *p++ = 0x69;
1709//.. p = doAMode_R(p, i->Xin.Alu32R.dst, i->Xin.Alu32R.dst);
1710//.. p = emit32(p, i->Xin.Alu32R.src->Xrmi.Imm.imm32);
1711//.. }
1712//.. goto done;
1713//.. default:
1714//.. goto bad;
1715//.. }
1716//.. }
1717//.. /* ADD/SUB/ADC/SBB/AND/OR/XOR/CMP */
1718//.. opc = opc_rr = subopc_imm = opc_imma = 0;
1719//.. switch (i->Xin.Alu32R.op) {
1720//.. case Xalu_ADC: opc = 0x13; opc_rr = 0x11;
1721//.. subopc_imm = 2; opc_imma = 0x15; break;
1722//.. case Xalu_ADD: opc = 0x03; opc_rr = 0x01;
1723//.. subopc_imm = 0; opc_imma = 0x05; break;
1724//.. case Xalu_SUB: opc = 0x2B; opc_rr = 0x29;
1725//.. subopc_imm = 5; opc_imma = 0x2D; break;
1726//.. case Xalu_SBB: opc = 0x1B; opc_rr = 0x19;
1727//.. subopc_imm = 3; opc_imma = 0x1D; break;
1728//.. case Xalu_AND: opc = 0x23; opc_rr = 0x21;
1729//.. subopc_imm = 4; opc_imma = 0x25; break;
1730//.. case Xalu_XOR: opc = 0x33; opc_rr = 0x31;
1731//.. subopc_imm = 6; opc_imma = 0x35; break;
1732//.. case Xalu_OR: opc = 0x0B; opc_rr = 0x09;
1733//.. subopc_imm = 1; opc_imma = 0x0D; break;
1734//.. case Xalu_CMP: opc = 0x3B; opc_rr = 0x39;
1735//.. subopc_imm = 7; opc_imma = 0x3D; break;
1736//.. default: goto bad;
1737//.. }
1738//.. switch (i->Xin.Alu32R.src->tag) {
1739//.. case Xrmi_Imm:
1740//.. if (i->Xin.Alu32R.dst == hregX86_EAX()
1741//.. && !fits8bits(i->Xin.Alu32R.src->Xrmi.Imm.imm32)) {
1742//.. *p++ = opc_imma;
1743//.. p = emit32(p, i->Xin.Alu32R.src->Xrmi.Imm.imm32);
1744//.. } else
1745//.. if (fits8bits(i->Xin.Alu32R.src->Xrmi.Imm.imm32)) {
1746//.. *p++ = 0x83;
1747//.. p = doAMode_R(p, fake(subopc_imm), i->Xin.Alu32R.dst);
1748//.. *p++ = 0xFF & i->Xin.Alu32R.src->Xrmi.Imm.imm32;
1749//.. } else {
1750//.. *p++ = 0x81;
1751//.. p = doAMode_R(p, fake(subopc_imm), i->Xin.Alu32R.dst);
1752//.. p = emit32(p, i->Xin.Alu32R.src->Xrmi.Imm.imm32);
1753//.. }
1754//.. goto done;
1755//.. case Xrmi_Reg:
1756//.. *p++ = opc_rr;
1757//.. p = doAMode_R(p, i->Xin.Alu32R.src->Xrmi.Reg.reg,
1758//.. i->Xin.Alu32R.dst);
1759//.. goto done;
1760//.. case Xrmi_Mem:
1761//.. *p++ = opc;
1762//.. p = doAMode_M(p, i->Xin.Alu32R.dst,
1763//.. i->Xin.Alu32R.src->Xrmi.Mem.am);
1764//.. goto done;
1765//.. default:
1766//.. goto bad;
1767//.. }
1768//.. break;
1769
1770//.. case Xin_Alu32M:
1771//.. /* Deal specially with MOV */
1772//.. if (i->Xin.Alu32M.op == Xalu_MOV) {
1773//.. switch (i->Xin.Alu32M.src->tag) {
1774//.. case Xri_Reg:
1775//.. *p++ = 0x89;
1776//.. p = doAMode_M(p, i->Xin.Alu32M.src->Xri.Reg.reg,
1777//.. i->Xin.Alu32M.dst);
1778//.. goto done;
1779//.. case Xri_Imm:
1780//.. *p++ = 0xC7;
1781//.. p = doAMode_M(p, fake(0), i->Xin.Alu32M.dst);
1782//.. p = emit32(p, i->Xin.Alu32M.src->Xri.Imm.imm32);
1783//.. goto done;
1784//.. default:
1785//.. goto bad;
1786//.. }
1787//.. }
1788//.. /* ADD/SUB/ADC/SBB/AND/OR/XOR/CMP. MUL is not
1789//.. allowed here. */
1790//.. opc = subopc_imm = opc_imma = 0;
1791//.. switch (i->Xin.Alu32M.op) {
1792//.. case Xalu_ADD: opc = 0x01; subopc_imm = 0; break;
1793//.. case Xalu_SUB: opc = 0x29; subopc_imm = 5; break;
1794//.. default: goto bad;
1795//.. }
1796//.. switch (i->Xin.Alu32M.src->tag) {
1797//.. case Xri_Reg:
1798//.. *p++ = opc;
1799//.. p = doAMode_M(p, i->Xin.Alu32M.src->Xri.Reg.reg,
1800//.. i->Xin.Alu32M.dst);
1801//.. goto done;
1802//.. case Xri_Imm:
1803//.. if (fits8bits(i->Xin.Alu32M.src->Xri.Imm.imm32)) {
1804//.. *p++ = 0x83;
1805//.. p = doAMode_M(p, fake(subopc_imm), i->Xin.Alu32M.dst);
1806//.. *p++ = 0xFF & i->Xin.Alu32M.src->Xri.Imm.imm32;
1807//.. goto done;
1808//.. } else {
1809//.. *p++ = 0x81;
1810//.. p = doAMode_M(p, fake(subopc_imm), i->Xin.Alu32M.dst);
1811//.. p = emit32(p, i->Xin.Alu32M.src->Xri.Imm.imm32);
1812//.. goto done;
1813//.. }
1814//.. default:
1815//.. goto bad;
1816//.. }
1817//.. break;
1818
1819//.. case Xin_Sh32:
1820//.. opc_cl = opc_imm = subopc = 0;
1821//.. switch (i->Xin.Sh32.op) {
1822//.. case Xsh_SHR: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 5; break;
1823//.. case Xsh_SAR: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 7; break;
1824//.. case Xsh_SHL: opc_cl = 0xD3; opc_imm = 0xC1; subopc = 4; break;
1825//.. default: goto bad;
1826//.. }
1827//.. if (i->Xin.Sh32.src == 0) {
1828//.. *p++ = opc_cl;
1829//.. switch (i->Xin.Sh32.dst->tag) {
1830//.. case Xrm_Reg:
1831//.. p = doAMode_R(p, fake(subopc),
1832//.. i->Xin.Sh32.dst->Xrm.Reg.reg);
1833//.. goto done;
1834//.. default:
1835//.. goto bad;
1836//.. }
1837//.. } else {
1838//.. *p++ = opc_imm;
1839//.. switch (i->Xin.Sh32.dst->tag) {
1840//.. case Xrm_Reg:
1841//.. p = doAMode_R(p, fake(subopc),
1842//.. i->Xin.Sh32.dst->Xrm.Reg.reg);
1843//.. *p++ = (UChar)(i->Xin.Sh32.src);
1844//.. goto done;
1845//.. default:
1846//.. goto bad;
1847//.. }
1848//.. }
1849//.. break;
1850
1851//.. case Xin_Test32:
1852//.. if (i->Xin.Test32.src->tag == Xri_Imm
1853//.. && i->Xin.Test32.dst->tag == Xrm_Reg) {
1854//.. /* testl $imm32, %reg */
1855//.. *p++ = 0xF7;
1856//.. p = doAMode_R(p, fake(0), i->Xin.Test32.dst->Xrm.Reg.reg);
1857//.. p = emit32(p, i->Xin.Test32.src->Xri.Imm.imm32);
1858//.. goto done;
1859//.. }
1860//.. break;
1861
1862//.. case Xin_Unary32:
1863//.. if (i->Xin.Unary32.op == Xun_NOT) {
1864//.. *p++ = 0xF7;
1865//.. if (i->Xin.Unary32.dst->tag == Xrm_Reg) {
1866//.. p = doAMode_R(p, fake(2), i->Xin.Unary32.dst->Xrm.Reg.reg);
1867//.. goto done;
1868//.. } else {
1869//.. goto bad;
1870//.. }
1871//.. }
1872//.. if (i->Xin.Unary32.op == Xun_NEG) {
1873//.. *p++ = 0xF7;
1874//.. if (i->Xin.Unary32.dst->tag == Xrm_Reg) {
1875//.. p = doAMode_R(p, fake(3), i->Xin.Unary32.dst->Xrm.Reg.reg);
1876//.. goto done;
1877//.. } else {
1878//.. goto bad;
1879//.. }
1880//.. }
1881//.. break;
1882//..
1883//.. case Xin_MulL:
1884//.. subopc = i->Xin.MulL.syned ? 5 : 4;
1885//.. if (i->Xin.MulL.ssz == Xss_32) {
1886//.. *p++ = 0xF7;
1887//.. switch (i->Xin.MulL.src->tag) {
1888//.. case Xrm_Mem:
1889//.. p = doAMode_M(p, fake(subopc),
1890//.. i->Xin.MulL.src->Xrm.Mem.am);
1891//.. goto done;
1892//.. case Xrm_Reg:
1893//.. p = doAMode_R(p, fake(subopc),
1894//.. i->Xin.MulL.src->Xrm.Reg.reg);
1895//.. goto done;
1896//.. default:
1897//.. goto bad;
1898//.. }
1899//.. }
1900//.. break;
1901
1902//.. case Xin_Div:
1903//.. subopc = i->Xin.Div.syned ? 7 : 6;
1904//.. if (i->Xin.Div.ssz == Xss_32) {
1905//.. *p++ = 0xF7;
1906//.. switch (i->Xin.Div.src->tag) {
1907//.. case Xrm_Mem:
1908//.. p = doAMode_M(p, fake(subopc),
1909//.. i->Xin.Div.src->Xrm.Mem.am);
1910//.. goto done;
1911//.. case Xrm_Reg:
1912//.. p = doAMode_R(p, fake(subopc),
1913//.. i->Xin.Div.src->Xrm.Reg.reg);
1914//.. goto done;
1915//.. default:
1916//.. goto bad;
1917//.. }
1918//.. }
1919//.. break;
1920
1921//.. case Xin_Sh3232:
1922//.. vassert(i->Xin.Sh3232.op == Xsh_SHL || i->Xin.Sh3232.op == Xsh_SHR);
1923//.. if (i->Xin.Sh3232.amt == 0) {
1924//.. /* shldl/shrdl by %cl */
1925//.. *p++ = 0x0F;
1926//.. if (i->Xin.Sh3232.op == Xsh_SHL) {
1927//.. *p++ = 0xA5;
1928//.. } else {
1929//.. *p++ = 0xAD;
1930//.. }
1931//.. p = doAMode_R(p, i->Xin.Sh3232.src, i->Xin.Sh3232.dst);
1932//.. goto done;
1933//.. }
1934//.. break;
1935
1936//.. case Xin_Push:
1937//.. switch (i->Xin.Push.src->tag) {
1938//.. case Xrmi_Mem:
1939//.. *p++ = 0xFF;
1940//.. p = doAMode_M(p, fake(6), i->Xin.Push.src->Xrmi.Mem.am);
1941//.. goto done;
1942//.. case Xrmi_Imm:
1943//.. *p++ = 0x68;
1944//.. p = emit32(p, i->Xin.Push.src->Xrmi.Imm.imm32);
1945//.. goto done;
1946//.. case Xrmi_Reg:
1947//.. *p++ = 0x50 + iregNo(i->Xin.Push.src->Xrmi.Reg.reg);
1948//.. goto done;
1949//.. default:
1950//.. goto bad;
1951//.. }
1952
1953//.. case Xin_Call:
1954//.. /* See detailed comment for Xin_Call in getRegUsage_X86Instr above
1955//.. for explanation of this. */
1956//.. switch (i->Xin.Call.regparms) {
1957//.. case 0: irno = iregNo(hregX86_EAX()); break;
1958//.. case 1: irno = iregNo(hregX86_EDX()); break;
1959//.. case 2: irno = iregNo(hregX86_ECX()); break;
1960//.. case 3: irno = iregNo(hregX86_EDI()); break;
1961//.. default: vpanic(" emit_X86Instr:call:regparms");
1962//.. }
1963//.. /* jump over the following two insns if the condition does not
1964//.. hold */
1965//.. if (i->Xin.Call.cond != Xcc_ALWAYS) {
1966//.. *p++ = 0x70 + (0xF & (i->Xin.Call.cond ^ 1));
1967//.. *p++ = 0x07; /* 7 bytes in the next two insns */
1968//.. }
1969//.. /* movl $target, %tmp */
1970//.. *p++ = 0xB8 + irno;
1971//.. p = emit32(p, i->Xin.Call.target);
1972//.. /* call *%tmp */
1973//.. *p++ = 0xFF;
1974//.. *p++ = 0xD0 + irno;
1975//.. goto done;
1976
1977//.. case Xin_Goto:
1978//.. /* Use ptmp for backpatching conditional jumps. */
1979//.. ptmp = NULL;
1980//..
1981//.. /* First off, if this is conditional, create a conditional
1982//.. jump over the rest of it. */
1983//.. if (i->Xin.Goto.cond != Xcc_ALWAYS) {
1984//.. /* jmp fwds if !condition */
1985//.. *p++ = 0x70 + (i->Xin.Goto.cond ^ 1);
1986//.. ptmp = p; /* fill in this bit later */
1987//.. *p++ = 0; /* # of bytes to jump over; don't know how many yet. */
1988//.. }
1989//..
1990//.. /* If a non-boring, set %ebp (the guest state pointer)
1991//.. /* If a non-boring jump kind, set %ebp (the guest state pointer)
1992//.. /* movl $magic_number, %ebp */
1993//.. switch (i->Xin.Goto.jk) {
1994//.. case Ijk_ClientReq:
1995//.. *p++ = 0xBD;
1996//.. p = emit32(p, VEX_TRC_JMP_CLIENTREQ); break;
1997//.. case Ijk_Syscall:
1998//.. *p++ = 0xBD;
1999//.. p = emit32(p, VEX_TRC_JMP_SYSCALL); break;
2000//.. case Ijk_Yield:
2001//.. *p++ = 0xBD;
2002//.. p = emit32(p, VEX_TRC_JMP_YIELD); break;
2003//.. case Ijk_EmWarn:
2004//.. *p++ = 0xBD;
2005//.. p = emit32(p, VEX_TRC_JMP_EMWARN); break;
2006//.. case Ijk_MapFail:
2007//.. *p++ = 0xBD;
2008//.. p = emit32(p, VEX_TRC_JMP_MAPFAIL); break;
2009//.. case Ijk_NoDecode:
2010//.. *p++ = 0xBD;
2011//.. p = emit32(p, VEX_TRC_JMP_NODECODE); break;
2012//.. case Ijk_Ret:
2013//.. case Ijk_Call:
2014//.. case Ijk_Boring:
2015//.. break;
2016//.. default:
2017//.. ppIRJumpKind(i->Xin.Goto.jk);
2018//.. vpanic("emit_X86Instr.Xin_Goto: unknown jump kind");
2019//.. }
2020//..
2021//.. /* Get the destination address into %eax */
2022//.. if (i->Xin.Goto.dst->tag == Xri_Imm) {
2023//.. /* movl $immediate, %eax ; ret */
2024//.. *p++ = 0xB8;
2025//.. p = emit32(p, i->Xin.Goto.dst->Xri.Imm.imm32);
2026//.. } else {
2027//.. vassert(i->Xin.Goto.dst->tag == Xri_Reg);
2028//.. /* movl %reg, %eax ; ret */
2029//.. if (i->Xin.Goto.dst->Xri.Reg.reg != hregX86_EAX()) {
2030//.. *p++ = 0x89;
2031//.. p = doAMode_R(p, i->Xin.Goto.dst->Xri.Reg.reg, hregX86_EAX());
2032//.. }
2033//.. }
2034//..
2035//.. /* ret */
2036//.. *p++ = 0xC3;
2037//..
2038//.. /* Fix up the conditional jump, if there was one. */
2039//.. if (i->Xin.Goto.cond != Xcc_ALWAYS) {
2040//.. Int delta = p - ptmp;
2041//.. vassert(delta > 0 && delta < 20);
2042//.. *ptmp = (UChar)(delta-1);
2043//.. }
2044//.. goto done;
2045//..
2046//.. case Xin_CMov32:
2047//.. vassert(i->Xin.CMov32.cond != Xcc_ALWAYS);
2048//.. #if 0
2049//.. /* This generates cmov, which is illegal on P54/P55. */
2050//.. *p++ = 0x0F;
2051//.. *p++ = 0x40 + i->Xin.CMov32.cond;
2052//.. if (i->Xin.CMov32.src->tag == Xrm_Reg) {
2053//.. p = doAMode_R(p, i->Xin.CMov32.dst, i->Xin.CMov32.src->Xrm.Reg.reg);
2054//.. goto done;
2055//.. }
2056//.. if (i->Xin.CMov32.src->tag == Xrm_Mem) {
2057//.. p = doAMode_M(p, i->Xin.CMov32.dst, i->Xin.CMov32.src->Xrm.Mem.am);
2058//.. goto done;
2059//.. }
2060//.. #else
2061//.. /* P5 friendly version: conditional jump over an unconditional
2062//.. move. */
2063//.. /* jmp fwds if !condition */
2064//.. *p++ = 0x70 + (i->Xin.CMov32.cond ^ 1);
2065//.. *p++ = 0; /* # of bytes in the next bit, which we don't know yet */
2066//.. ptmp = p;
2067//..
2068//.. switch (i->Xin.CMov32.src->tag) {
2069//.. case Xrm_Reg:
2070//.. /* Big sigh. This is movl E -> G ... */
2071//.. *p++ = 0x89;
2072//.. p = doAMode_R(p, i->Xin.CMov32.src->Xrm.Reg.reg,
2073//.. i->Xin.CMov32.dst);
2074//..
2075//.. break;
2076//.. case Xrm_Mem:
2077//.. /* ... whereas this is movl G -> E. That's why the args
2078//.. to doAMode_R appear to be the wrong way round in the
2079//.. Xrm_Reg case. */
2080//.. *p++ = 0x8B;
2081//.. p = doAMode_M(p, i->Xin.CMov32.dst,
2082//.. i->Xin.CMov32.src->Xrm.Mem.am);
2083//.. break;
2084//.. default:
2085//.. goto bad;
2086//.. }
2087//.. /* Fill in the jump offset. */
2088//.. *(ptmp-1) = p - ptmp;
2089//.. goto done;
2090//.. #endif
2091//.. break;
2092//..
2093//.. case Xin_LoadEX:
2094//.. if (i->Xin.LoadEX.szSmall == 1 && !i->Xin.LoadEX.syned) {
2095//.. /* movzbl */
2096//.. *p++ = 0x0F;
2097//.. *p++ = 0xB6;
2098//.. p = doAMode_M(p, i->Xin.LoadEX.dst, i->Xin.LoadEX.src);
2099//.. goto done;
2100//.. }
2101//.. if (i->Xin.LoadEX.szSmall == 2 && !i->Xin.LoadEX.syned) {
2102//.. /* movzwl */
2103//.. *p++ = 0x0F;
2104//.. *p++ = 0xB7;
2105//.. p = doAMode_M(p, i->Xin.LoadEX.dst, i->Xin.LoadEX.src);
2106//.. goto done;
2107//.. }
2108//.. break;
2109//..
2110//.. case Xin_Set32:
2111//.. /* Make the destination register be 1 or 0, depending on whether
2112//.. the relevant condition holds. We have to dodge and weave
2113//.. when the destination is %esi or %edi as we cannot directly
2114//.. emit the native 'setb %reg' for those. Further complication:
2115//.. the top 24 bits of the destination should be forced to zero,
2116//.. but doing 'xor %r,%r' kills the flag(s) we are about to read.
2117//.. Sigh. So start off by moving $0 into the dest. */
2118//..
2119//.. /* Do we need to swap in %eax? */
2120//.. if (iregNo(i->Xin.Set32.dst) >= 4) {
2121//.. /* xchg %eax, %dst */
2122//.. *p++ = 0x90 + iregNo(i->Xin.Set32.dst);
2123//.. /* movl $0, %eax */
2124//.. *p++ = 0xB8 + iregNo(hregX86_EAX());
2125//.. p = emit32(p, 0);
2126//.. /* setb lo8(%eax) */
2127//.. *p++ = 0x0F;
2128//.. *p++ = 0x90 + (UChar)(i->Xin.Set32.cond);
2129//.. p = doAMode_R(p, fake(0), hregX86_EAX());
2130//.. /* xchg %eax, %dst */
2131//.. *p++ = 0x90 + iregNo(i->Xin.Set32.dst);
2132//.. } else {
2133//.. /* movl $0, %dst */
2134//.. *p++ = 0xB8 + iregNo(i->Xin.Set32.dst);
2135//.. p = emit32(p, 0);
2136//.. /* setb lo8(%dst) */
2137//.. *p++ = 0x0F;
2138//.. *p++ = 0x90 + (UChar)(i->Xin.Set32.cond);
2139//.. p = doAMode_R(p, fake(0), i->Xin.Set32.dst);
2140//.. }
2141//.. goto done;
2142//..
2143//.. case Xin_Bsfr32:
2144//.. *p++ = 0x0F;
2145//.. if (i->Xin.Bsfr32.isFwds) {
2146//.. *p++ = 0xBC;
2147//.. } else {
2148//.. *p++ = 0xBD;
2149//.. }
2150//.. p = doAMode_R(p, i->Xin.Bsfr32.dst, i->Xin.Bsfr32.src);
2151//.. goto done;
2152//..
2153//.. case Xin_MFence:
2154//.. /* see comment in hdefs.h re this insn */
2155//.. if (0) vex_printf("EMIT FENCE\n");
2156//.. switch (i->Xin.MFence.subarch) {
2157//.. case VexSubArchX86_sse0:
2158//.. vassert(0); /* awaiting test case */
2159//.. /* lock addl $0,0(%esp) */
2160//.. *p++ = 0xF0; *p++ = 0x83; *p++ = 0x44;
2161//.. *p++ = 0x24; *p++ = 0x00; *p++ = 0x00;
2162//.. goto done;
2163//.. case VexSubArchX86_sse1:
2164//.. /* sfence */
2165//.. *p++ = 0x0F; *p++ = 0xAE; *p++ = 0xF8;
2166//.. /* lock addl $0,0(%esp) */
2167//.. *p++ = 0xF0; *p++ = 0x83; *p++ = 0x44;
2168//.. *p++ = 0x24; *p++ = 0x00; *p++ = 0x00;
2169//.. goto done;
2170//.. case VexSubArchX86_sse2:
2171//.. /* mfence */
2172//.. *p++ = 0x0F; *p++ = 0xAE; *p++ = 0xF0;
2173//.. goto done;
2174//.. default:
2175//.. vpanic("emit_X86Instr:mfence:subarch");
2176//.. }
2177//.. break;
2178//..
2179//.. case Xin_Store:
2180//.. if (i->Xin.Store.sz == 2) {
2181//.. /* This case, at least, is simple, given that we can
2182//.. reference the low 16 bits of any integer register. */
2183//.. *p++ = 0x66;
2184//.. *p++ = 0x89;
2185//.. p = doAMode_M(p, i->Xin.Store.src, i->Xin.Store.dst);
2186//.. goto done;
2187//.. }
2188//..
2189//.. if (i->Xin.Store.sz == 1) {
2190//.. /* We have to do complex dodging and weaving if src is not
2191//.. the low 8 bits of %eax/%ebx/%ecx/%edx. */
2192//.. if (iregNo(i->Xin.Store.src) < 4) {
2193//.. /* we're OK, can do it directly */
2194//.. *p++ = 0x88;
2195//.. p = doAMode_M(p, i->Xin.Store.src, i->Xin.Store.dst);
2196//.. goto done;
2197//.. } else {
2198//.. /* Bleh. This means the source is %edi or %esi. Since
2199//.. the address mode can only mention three registers, at
2200//.. least one of %eax/%ebx/%ecx/%edx must be available to
2201//.. temporarily swap the source into, so the store can
2202//.. happen. So we have to look at the regs mentioned
2203//.. in the amode. */
2204//.. HReg swap = INVALID_HREG;
2205//.. HReg eax = hregX86_EAX(), ebx = hregX86_EBX(),
2206//.. ecx = hregX86_ECX(), edx = hregX86_EDX();
2207//.. Bool a_ok = True, b_ok = True, c_ok = True, d_ok = True;
2208//.. HRegUsage u;
2209//.. Int j;
2210//.. initHRegUsage(&u);
2211//.. addRegUsage_X86AMode(&u, i->Xin.Store.dst);
2212//.. for (j = 0; j < u.n_used; j++) {
2213//.. HReg r = u.hreg[j];
2214//.. if (r == eax) a_ok = False;
2215//.. if (r == ebx) b_ok = False;
2216//.. if (r == ecx) c_ok = False;
2217//.. if (r == edx) d_ok = False;
2218//.. }
2219//.. if (a_ok) swap = eax;
2220//.. if (b_ok) swap = ebx;
2221//.. if (c_ok) swap = ecx;
2222//.. if (d_ok) swap = edx;
2223//.. vassert(swap != INVALID_HREG);
2224//.. /* xchgl %source, %swap. Could do better if swap is %eax. */
2225//.. *p++ = 0x87;
2226//.. p = doAMode_R(p, i->Xin.Store.src, swap);
2227//.. /* movb lo8{%swap}, (dst) */
2228//.. *p++ = 0x88;
2229//.. p = doAMode_M(p, swap, i->Xin.Store.dst);
2230//.. /* xchgl %source, %swap. Could do better if swap is %eax. */
2231//.. *p++ = 0x87;
2232//.. p = doAMode_R(p, i->Xin.Store.src, swap);
2233//.. goto done;
2234//.. }
2235//.. } /* if (i->Xin.Store.sz == 1) */
2236//.. break;
2237//..
2238//.. case Xin_FpUnary:
2239//.. /* gop %src, %dst
2240//.. --> ffree %st7 ; fld %st(src) ; fop %st(0) ; fstp %st(1+dst)
2241//.. */
2242//.. p = do_ffree_st7(p);
2243//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpUnary.src));
2244//.. p = do_fop1_st(p, i->Xin.FpUnary.op);
2245//.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpUnary.dst));
2246//.. goto done;
2247//..
2248//.. case Xin_FpBinary:
2249//.. if (i->Xin.FpBinary.op == Xfp_YL2X
2250//.. || i->Xin.FpBinary.op == Xfp_YL2XP1) {
2251//.. /* Have to do this specially. */
2252//.. /* ffree %st7 ; fld %st(srcL) ;
2253//.. ffree %st7 ; fld %st(srcR+1) ; fyl2x{p1} ; fstp(1+dst) */
2254//.. p = do_ffree_st7(p);
2255//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpBinary.srcL));
2256//.. p = do_ffree_st7(p);
2257//.. p = do_fld_st(p, 1+hregNumber(i->Xin.FpBinary.srcR));
2258//.. *p++ = 0xD9;
2259//.. *p++ = i->Xin.FpBinary.op==Xfp_YL2X ? 0xF1 : 0xF9;
2260//.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpBinary.dst));
2261//.. goto done;
2262//.. }
2263//.. if (i->Xin.FpBinary.op == Xfp_ATAN) {
2264//.. /* Have to do this specially. */
2265//.. /* ffree %st7 ; fld %st(srcL) ;
2266//.. ffree %st7 ; fld %st(srcR+1) ; fpatan ; fstp(1+dst) */
2267//.. p = do_ffree_st7(p);
2268//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpBinary.srcL));
2269//.. p = do_ffree_st7(p);
2270//.. p = do_fld_st(p, 1+hregNumber(i->Xin.FpBinary.srcR));
2271//.. *p++ = 0xD9; *p++ = 0xF3;
2272//.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpBinary.dst));
2273//.. goto done;
2274//.. }
2275//.. if (i->Xin.FpBinary.op == Xfp_PREM
2276//.. || i->Xin.FpBinary.op == Xfp_PREM1
2277//.. || i->Xin.FpBinary.op == Xfp_SCALE) {
2278//.. /* Have to do this specially. */
2279//.. /* ffree %st7 ; fld %st(srcR) ;
2280//.. ffree %st7 ; fld %st(srcL+1) ; fprem/fprem1/fscale ; fstp(2+dst) ;
2281//.. fincstp ; ffree %st7 */
2282//.. p = do_ffree_st7(p);
2283//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpBinary.srcR));
2284//.. p = do_ffree_st7(p);
2285//.. p = do_fld_st(p, 1+hregNumber(i->Xin.FpBinary.srcL));
2286//.. *p++ = 0xD9;
2287//.. switch (i->Xin.FpBinary.op) {
2288//.. case Xfp_PREM: *p++ = 0xF8; break;
2289//.. case Xfp_PREM1: *p++ = 0xF5; break;
2290//.. case Xfp_SCALE: *p++ = 0xFD; break;
2291//.. default: vpanic("emitX86Instr(FpBinary,PREM/PREM1/SCALE)");
2292//.. }
2293//.. p = do_fstp_st(p, 2+hregNumber(i->Xin.FpBinary.dst));
2294//.. *p++ = 0xD9; *p++ = 0xF7;
2295//.. p = do_ffree_st7(p);
2296//.. goto done;
2297//.. }
2298//.. /* General case */
2299//.. /* gop %srcL, %srcR, %dst
2300//.. --> ffree %st7 ; fld %st(srcL) ; fop %st(1+srcR) ; fstp %st(1+dst)
2301//.. */
2302//.. p = do_ffree_st7(p);
2303//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpBinary.srcL));
2304//.. p = do_fop2_st(p, i->Xin.FpBinary.op,
2305//.. 1+hregNumber(i->Xin.FpBinary.srcR));
2306//.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpBinary.dst));
2307//.. goto done;
2308//..
2309//.. case Xin_FpLdSt:
2310//.. vassert(i->Xin.FpLdSt.sz == 4 || i->Xin.FpLdSt.sz == 8);
2311//.. if (i->Xin.FpLdSt.isLoad) {
2312//.. /* Load from memory into %fakeN.
2313//.. --> ffree %st(7) ; fld{s/l} amode ; fstp st(N+1)
2314//.. */
2315//.. p = do_ffree_st7(p);
2316//.. *p++ = i->Xin.FpLdSt.sz==4 ? 0xD9 : 0xDD;
2317//.. p = doAMode_M(p, fake(0)/*subopcode*/, i->Xin.FpLdSt.addr);
2318//.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpLdSt.reg));
2319//.. goto done;
2320//.. } else {
2321//.. /* Store from %fakeN into memory.
2322//.. --> ffree %st(7) ; fld st(N) ; fstp{l|s} amode
2323//.. */
2324//.. p = do_ffree_st7(p);
2325//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpLdSt.reg));
2326//.. *p++ = i->Xin.FpLdSt.sz==4 ? 0xD9 : 0xDD;
2327//.. p = doAMode_M(p, fake(3)/*subopcode*/, i->Xin.FpLdSt.addr);
2328//.. goto done;
2329//.. }
2330//.. break;
2331//..
2332//.. case Xin_FpLdStI:
2333//.. if (i->Xin.FpLdStI.isLoad) {
2334//.. /* Load from memory into %fakeN, converting from an int.
2335//.. --> ffree %st(7) ; fild{w/l/ll} amode ; fstp st(N+1)
2336//.. */
2337//.. switch (i->Xin.FpLdStI.sz) {
2338//.. case 8: opc = 0xDF; subopc_imm = 5; break;
2339//.. case 4: opc = 0xDB; subopc_imm = 0; break;
2340//.. case 2: vassert(0); opc = 0xDF; subopc_imm = 0; break;
2341//.. default: vpanic("emitX86Instr(Xin_FpLdStI-load)");
2342//.. }
2343//.. p = do_ffree_st7(p);
2344//.. *p++ = opc;
2345//.. p = doAMode_M(p, fake(subopc_imm)/*subopcode*/, i->Xin.FpLdStI.addr);
2346//.. p = do_fstp_st(p, 1+hregNumber(i->Xin.FpLdStI.reg));
2347//.. goto done;
2348//.. } else {
2349//.. /* Store from %fakeN into memory, converting to an int.
2350//.. --> ffree %st(7) ; fld st(N) ; fistp{w/l/ll} amode
2351//.. */
2352//.. switch (i->Xin.FpLdStI.sz) {
2353//.. case 8: opc = 0xDF; subopc_imm = 7; break;
2354//.. case 4: opc = 0xDB; subopc_imm = 3; break;
2355//.. case 2: opc = 0xDF; subopc_imm = 3; break;
2356//.. default: vpanic("emitX86Instr(Xin_FpLdStI-store)");
2357//.. }
2358//.. p = do_ffree_st7(p);
2359//.. p = do_fld_st(p, 0+hregNumber(i->Xin.FpLdStI.reg));
2360//.. *p++ = opc;
2361//.. p = doAMode_M(p, fake(subopc_imm)/*subopcode*/, i->Xin.FpLdStI.addr);
2362//.. goto done;
2363//.. }
2364//.. break;
2365//..
2366//.. case Xin_Fp64to32:
2367//.. /* ffree %st7 ; fld %st(src) */
2368//.. p = do_ffree_st7(p);
2369//.. p = do_fld_st(p, 0+fregNo(i->Xin.Fp64to32.src));
2370//.. /* subl $4, %esp */
2371//.. *p++ = 0x83; *p++ = 0xEC; *p++ = 0x04;
2372//.. /* fstps (%esp) */
2373//.. *p++ = 0xD9; *p++ = 0x1C; *p++ = 0x24;
2374//.. /* flds (%esp) */
2375//.. *p++ = 0xD9; *p++ = 0x04; *p++ = 0x24;
2376//.. /* addl $4, %esp */
2377//.. *p++ = 0x83; *p++ = 0xC4; *p++ = 0x04;
2378//.. /* fstp %st(1+dst) */
2379//.. p = do_fstp_st(p, 1+fregNo(i->Xin.Fp64to32.dst));
2380//.. goto done;
2381//..
2382//.. case Xin_FpCMov:
2383//.. /* jmp fwds if !condition */
2384//.. *p++ = 0x70 + (i->Xin.FpCMov.cond ^ 1);
2385//.. *p++ = 0; /* # of bytes in the next bit, which we don't know yet */
2386//.. ptmp = p;
2387//..
2388//.. /* ffree %st7 ; fld %st(src) ; fstp %st(1+dst) */
2389//.. p = do_ffree_st7(p);
2390//.. p = do_fld_st(p, 0+fregNo(i->Xin.FpCMov.src));
2391//.. p = do_fstp_st(p, 1+fregNo(i->Xin.FpCMov.dst));
2392//..
2393//.. /* Fill in the jump offset. */
2394//.. *(ptmp-1) = p - ptmp;
2395//.. goto done;
2396//..
2397//.. case Xin_FpLdStCW:
2398//.. if (i->Xin.FpLdStCW.isLoad) {
2399//.. *p++ = 0xD9;
2400//.. p = doAMode_M(p, fake(5)/*subopcode*/, i->Xin.FpLdStCW.addr);
2401//.. } else {
2402//.. vassert(0);
2403//.. }
2404//.. goto done;
2405//..
2406//.. case Xin_FpStSW_AX:
2407//.. /* note, this emits fnstsw %ax, not fstsw %ax */
2408//.. *p++ = 0xDF;
2409//.. *p++ = 0xE0;
2410//.. goto done;
2411//..
2412//.. case Xin_FpCmp:
2413//.. /* gcmp %fL, %fR, %dst
2414//.. -> ffree %st7; fpush %fL ; fucomp %(fR+1) ;
2415//.. fnstsw %ax ; movl %eax, %dst
2416//.. */
2417//.. /* ffree %st7 */
2418//.. p = do_ffree_st7(p);
2419//.. /* fpush %fL */
2420//.. p = do_fld_st(p, 0+fregNo(i->Xin.FpCmp.srcL));
2421//.. /* fucomp %(fR+1) */
2422//.. *p++ = 0xDD;
2423//.. *p++ = 0xE8 + (7 & (1+fregNo(i->Xin.FpCmp.srcR)));
2424//.. /* fnstsw %ax */
2425//.. *p++ = 0xDF;
2426//.. *p++ = 0xE0;
2427//.. /* movl %eax, %dst */
2428//.. *p++ = 0x89;
2429//.. p = doAMode_R(p, hregX86_EAX(), i->Xin.FpCmp.dst);
2430//.. goto done;
2431//..
2432//.. case Xin_SseConst: {
2433//.. UShort con = i->Xin.SseConst.con;
2434//.. p = push_word_from_tags(p, (con >> 12) & 0xF);
2435//.. p = push_word_from_tags(p, (con >> 8) & 0xF);
2436//.. p = push_word_from_tags(p, (con >> 4) & 0xF);
2437//.. p = push_word_from_tags(p, con & 0xF);
2438//.. /* movl (%esp), %xmm-dst */
2439//.. *p++ = 0x0F;
2440//.. *p++ = 0x10;
2441//.. *p++ = 0x04 + 8 * (7 & vregNo(i->Xin.SseConst.dst));
2442//.. *p++ = 0x24;
2443//.. /* addl $16, %esp */
2444//.. *p++ = 0x83;
2445//.. *p++ = 0xC4;
2446//.. *p++ = 0x10;
2447//.. goto done;
2448//.. }
2449//..
2450//.. case Xin_SseLdSt:
2451//.. *p++ = 0x0F;
2452//.. *p++ = i->Xin.SseLdSt.isLoad ? 0x10 : 0x11;
2453//.. p = doAMode_M(p, fake(vregNo(i->Xin.SseLdSt.reg)), i->Xin.SseLdSt.addr);
2454//.. goto done;
2455//..
2456//.. case Xin_SseLdzLO:
2457//.. vassert(i->Xin.SseLdzLO.sz == 4 || i->Xin.SseLdzLO.sz == 8);
2458//.. /* movs[sd] amode, %xmm-dst */
2459//.. *p++ = i->Xin.SseLdzLO.sz==4 ? 0xF3 : 0xF2;
2460//.. *p++ = 0x0F;
2461//.. *p++ = 0x10;
2462//.. p = doAMode_M(p, fake(vregNo(i->Xin.SseLdzLO.reg)),
2463//.. i->Xin.SseLdzLO.addr);
2464//.. goto done;
2465//..
2466//.. case Xin_Sse32Fx4:
2467//.. xtra = 0;
2468//.. *p++ = 0x0F;
2469//.. switch (i->Xin.Sse32Fx4.op) {
2470//.. case Xsse_ADDF: *p++ = 0x58; break;
2471//.. case Xsse_DIVF: *p++ = 0x5E; break;
2472//.. case Xsse_MAXF: *p++ = 0x5F; break;
2473//.. case Xsse_MINF: *p++ = 0x5D; break;
2474//.. case Xsse_MULF: *p++ = 0x59; break;
2475//.. case Xsse_RCPF: *p++ = 0x53; break;
2476//.. case Xsse_RSQRTF: *p++ = 0x52; break;
2477//.. case Xsse_SQRTF: *p++ = 0x51; break;
2478//.. case Xsse_SUBF: *p++ = 0x5C; break;
2479//.. case Xsse_CMPEQF: *p++ = 0xC2; xtra = 0x100; break;
2480//.. case Xsse_CMPLTF: *p++ = 0xC2; xtra = 0x101; break;
2481//.. case Xsse_CMPLEF: *p++ = 0xC2; xtra = 0x102; break;
2482//.. default: goto bad;
2483//.. }
2484//.. p = doAMode_R(p, fake(vregNo(i->Xin.Sse32Fx4.dst)),
2485//.. fake(vregNo(i->Xin.Sse32Fx4.src)) );
2486//.. if (xtra & 0x100)
2487//.. *p++ = (UChar)(xtra & 0xFF);
2488//.. goto done;
2489//..
2490//.. case Xin_Sse64Fx2:
2491//.. xtra = 0;
2492//.. *p++ = 0x66;
2493//.. *p++ = 0x0F;
2494//.. switch (i->Xin.Sse64Fx2.op) {
2495//.. case Xsse_ADDF: *p++ = 0x58; break;
2496//.. case Xsse_DIVF: *p++ = 0x5E; break;
2497//.. case Xsse_MAXF: *p++ = 0x5F; break;
2498//.. case Xsse_MINF: *p++ = 0x5D; break;
2499//.. case Xsse_MULF: *p++ = 0x59; break;
2500//.. case Xsse_RCPF: *p++ = 0x53; break;
2501//.. case Xsse_RSQRTF: *p++ = 0x52; break;
2502//.. case Xsse_SQRTF: *p++ = 0x51; break;
2503//.. case Xsse_SUBF: *p++ = 0x5C; break;
2504//.. case Xsse_CMPEQF: *p++ = 0xC2; xtra = 0x100; break;
2505//.. case Xsse_CMPLTF: *p++ = 0xC2; xtra = 0x101; break;
2506//.. case Xsse_CMPLEF: *p++ = 0xC2; xtra = 0x102; break;
2507//.. default: goto bad;
2508//.. }
2509//.. p = doAMode_R(p, fake(vregNo(i->Xin.Sse64Fx2.dst)),
2510//.. fake(vregNo(i->Xin.Sse64Fx2.src)) );
2511//.. if (xtra & 0x100)
2512//.. *p++ = (UChar)(xtra & 0xFF);
2513//.. goto done;
2514//..
2515//.. case Xin_Sse32FLo:
2516//.. xtra = 0;
2517//.. *p++ = 0xF3;
2518//.. *p++ = 0x0F;
2519//.. switch (i->Xin.Sse32FLo.op) {
2520//.. case Xsse_ADDF: *p++ = 0x58; break;
2521//.. case Xsse_DIVF: *p++ = 0x5E; break;
2522//.. case Xsse_MAXF: *p++ = 0x5F; break;
2523//.. case Xsse_MINF: *p++ = 0x5D; break;
2524//.. case Xsse_MULF: *p++ = 0x59; break;
2525//.. case Xsse_RCPF: *p++ = 0x53; break;
2526//.. case Xsse_RSQRTF: *p++ = 0x52; break;
2527//.. case Xsse_SQRTF: *p++ = 0x51; break;
2528//.. case Xsse_SUBF: *p++ = 0x5C; break;
2529//.. case Xsse_CMPEQF: *p++ = 0xC2; xtra = 0x100; break;
2530//.. case Xsse_CMPLTF: *p++ = 0xC2; xtra = 0x101; break;
2531//.. case Xsse_CMPLEF: *p++ = 0xC2; xtra = 0x102; break;
2532//.. default: goto bad;
2533//.. }
2534//.. p = doAMode_R(p, fake(vregNo(i->Xin.Sse32FLo.dst)),
2535//.. fake(vregNo(i->Xin.Sse32FLo.src)) );
2536//.. if (xtra & 0x100)
2537//.. *p++ = (UChar)(xtra & 0xFF);
2538//.. goto done;
2539//..
2540//.. case Xin_Sse64FLo:
2541//.. xtra = 0;
2542//.. *p++ = 0xF2;
2543//.. *p++ = 0x0F;
2544//.. switch (i->Xin.Sse64FLo.op) {
2545//.. case Xsse_ADDF: *p++ = 0x58; break;
2546//.. case Xsse_DIVF: *p++ = 0x5E; break;
2547//.. case Xsse_MAXF: *p++ = 0x5F; break;
2548//.. case Xsse_MINF: *p++ = 0x5D; break;
2549//.. case Xsse_MULF: *p++ = 0x59; break;
2550//.. case Xsse_RCPF: *p++ = 0x53; break;
2551//.. case Xsse_RSQRTF: *p++ = 0x52; break;
2552//.. case Xsse_SQRTF: *p++ = 0x51; break;
2553//.. case Xsse_SUBF: *p++ = 0x5C; break;
2554//.. case Xsse_CMPEQF: *p++ = 0xC2; xtra = 0x100; break;
2555//.. case Xsse_CMPLTF: *p++ = 0xC2; xtra = 0x101; break;
2556//.. case Xsse_CMPLEF: *p++ = 0xC2; xtra = 0x102; break;
2557//.. default: goto bad;
2558//.. }
2559//.. p = doAMode_R(p, fake(vregNo(i->Xin.Sse64FLo.dst)),
2560//.. fake(vregNo(i->Xin.Sse64FLo.src)) );
2561//.. if (xtra & 0x100)
2562//.. *p++ = (UChar)(xtra & 0xFF);
2563//.. goto done;
2564//..
2565//.. case Xin_SseReRg:
2566//.. # define XX(_n) *p++ = (_n)
2567//.. switch (i->Xin.SseReRg.op) {
2568//.. case Xsse_MOV: /*movups*/ XX(0x0F); XX(0x10); break;
2569//.. case Xsse_OR: XX(0x0F); XX(0x56); break;
2570//.. case Xsse_XOR: XX(0x0F); XX(0x57); break;
2571//.. case Xsse_AND: XX(0x0F); XX(0x54); break;
2572//.. case Xsse_PACKSSD: XX(0x66); XX(0x0F); XX(0x6B); break;
2573//.. case Xsse_PACKSSW: XX(0x66); XX(0x0F); XX(0x63); break;
2574//.. case Xsse_PACKUSW: XX(0x66); XX(0x0F); XX(0x67); break;
2575//.. case Xsse_ADD8: XX(0x66); XX(0x0F); XX(0xFC); break;
2576//.. case Xsse_ADD16: XX(0x66); XX(0x0F); XX(0xFD); break;
2577//.. case Xsse_ADD32: XX(0x66); XX(0x0F); XX(0xFE); break;
2578//.. case Xsse_ADD64: XX(0x66); XX(0x0F); XX(0xD4); break;
2579//.. case Xsse_QADD8S: XX(0x66); XX(0x0F); XX(0xEC); break;
2580//.. case Xsse_QADD16S: XX(0x66); XX(0x0F); XX(0xED); break;
2581//.. case Xsse_QADD8U: XX(0x66); XX(0x0F); XX(0xDC); break;
2582//.. case Xsse_QADD16U: XX(0x66); XX(0x0F); XX(0xDD); break;
2583//.. case Xsse_AVG8U: XX(0x66); XX(0x0F); XX(0xE0); break;
2584//.. case Xsse_AVG16U: XX(0x66); XX(0x0F); XX(0xE3); break;
2585//.. case Xsse_CMPEQ8: XX(0x66); XX(0x0F); XX(0x74); break;
2586//.. case Xsse_CMPEQ16: XX(0x66); XX(0x0F); XX(0x75); break;
2587//.. case Xsse_CMPEQ32: XX(0x66); XX(0x0F); XX(0x76); break;
2588//.. case Xsse_CMPGT8S: XX(0x66); XX(0x0F); XX(0x64); break;
2589//.. case Xsse_CMPGT16S: XX(0x66); XX(0x0F); XX(0x65); break;
2590//.. case Xsse_CMPGT32S: XX(0x66); XX(0x0F); XX(0x66); break;
2591//.. case Xsse_MAX16S: XX(0x66); XX(0x0F); XX(0xEE); break;
2592//.. case Xsse_MAX8U: XX(0x66); XX(0x0F); XX(0xDE); break;
2593//.. case Xsse_MIN16S: XX(0x66); XX(0x0F); XX(0xEA); break;
2594//.. case Xsse_MIN8U: XX(0x66); XX(0x0F); XX(0xDA); break;
2595//.. case Xsse_MULHI16U: XX(0x66); XX(0x0F); XX(0xE4); break;
2596//.. case Xsse_MULHI16S: XX(0x66); XX(0x0F); XX(0xE5); break;
2597//.. case Xsse_MUL16: XX(0x66); XX(0x0F); XX(0xD5); break;
2598//.. case Xsse_SHL16: XX(0x66); XX(0x0F); XX(0xF1); break;
2599//.. case Xsse_SHL32: XX(0x66); XX(0x0F); XX(0xF2); break;
2600//.. case Xsse_SHL64: XX(0x66); XX(0x0F); XX(0xF3); break;
2601//.. case Xsse_SAR16: XX(0x66); XX(0x0F); XX(0xE1); break;
2602//.. case Xsse_SAR32: XX(0x66); XX(0x0F); XX(0xE2); break;
2603//.. case Xsse_SHR16: XX(0x66); XX(0x0F); XX(0xD1); break;
2604//.. case Xsse_SHR32: XX(0x66); XX(0x0F); XX(0xD2); break;
2605//.. case Xsse_SHR64: XX(0x66); XX(0x0F); XX(0xD3); break;
2606//.. case Xsse_SUB8: XX(0x66); XX(0x0F); XX(0xF8); break;
2607//.. case Xsse_SUB16: XX(0x66); XX(0x0F); XX(0xF9); break;
2608//.. case Xsse_SUB32: XX(0x66); XX(0x0F); XX(0xFA); break;
2609//.. case Xsse_SUB64: XX(0x66); XX(0x0F); XX(0xFB); break;
2610//.. case Xsse_QSUB8S: XX(0x66); XX(0x0F); XX(0xE8); break;
2611//.. case Xsse_QSUB16S: XX(0x66); XX(0x0F); XX(0xE9); break;
2612//.. case Xsse_QSUB8U: XX(0x66); XX(0x0F); XX(0xD8); break;
2613//.. case Xsse_QSUB16U: XX(0x66); XX(0x0F); XX(0xD9); break;
2614//.. case Xsse_UNPCKHB: XX(0x66); XX(0x0F); XX(0x68); break;
2615//.. case Xsse_UNPCKHW: XX(0x66); XX(0x0F); XX(0x69); break;
2616//.. case Xsse_UNPCKHD: XX(0x66); XX(0x0F); XX(0x6A); break;
2617//.. case Xsse_UNPCKHQ: XX(0x66); XX(0x0F); XX(0x6D); break;
2618//.. case Xsse_UNPCKLB: XX(0x66); XX(0x0F); XX(0x60); break;
2619//.. case Xsse_UNPCKLW: XX(0x66); XX(0x0F); XX(0x61); break;
2620//.. case Xsse_UNPCKLD: XX(0x66); XX(0x0F); XX(0x62); break;
2621//.. case Xsse_UNPCKLQ: XX(0x66); XX(0x0F); XX(0x6C); break;
2622//.. default: goto bad;
2623//.. }
2624//.. p = doAMode_R(p, fake(vregNo(i->Xin.SseReRg.dst)),
2625//.. fake(vregNo(i->Xin.SseReRg.src)) );
2626//.. # undef XX
2627//.. goto done;
2628//..
2629//.. case Xin_SseCMov:
2630//.. /* jmp fwds if !condition */
2631//.. *p++ = 0x70 + (i->Xin.SseCMov.cond ^ 1);
2632//.. *p++ = 0; /* # of bytes in the next bit, which we don't know yet */
2633//.. ptmp = p;
2634//..
2635//.. /* movaps %src, %dst */
2636//.. *p++ = 0x0F;
2637//.. *p++ = 0x28;
2638//.. p = doAMode_R(p, fake(vregNo(i->Xin.SseCMov.dst)),
2639//.. fake(vregNo(i->Xin.SseCMov.src)) );
2640//..
2641//.. /* Fill in the jump offset. */
2642//.. *(ptmp-1) = p - ptmp;
2643//.. goto done;
2644//..
2645//.. case Xin_SseShuf:
2646//.. *p++ = 0x66;
2647//.. *p++ = 0x0F;
2648//.. *p++ = 0x70;
2649//.. p = doAMode_R(p, fake(vregNo(i->Xin.SseShuf.dst)),
2650//.. fake(vregNo(i->Xin.SseShuf.src)) );
2651//.. *p++ = (UChar)(i->Xin.SseShuf.order);
2652//.. goto done;
2653
2654 default:
2655 goto bad;
2656 }
2657
2658 bad:
2659 ppPPC32Instr(i);
2660 vpanic("emit_PPC32Instr");
2661 /*NOTREACHED*/
2662
2663 goto done; // CAB: Remember to remove - present only to reduce compiler warnings.
2664 done:
2665 vassert(p - &buf[0] <= 32);
2666 return p - &buf[0];
2667
2668# undef fake   /* NB: the matching '# define fake' above is currently commented out */
2669}
2670
2671/*---------------------------------------------------------------*/
2672/*--- end host-ppc32/hdefs.c ---*/
2673/*---------------------------------------------------------------*/