
/*---------------------------------------------------------------*/
/*--- begin                                  host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2013 OpenWorks
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.
*/

#ifndef __VEX_HOST_ARM64_DEFS_H
#define __VEX_HOST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "libvex.h"                      // VexArch
#include "host_generic_regs.h"           // HReg


/* --------- Registers. --------- */

/* The usual HReg abstraction.
   There are 31 general purpose regs.
*/
45extern void ppHRegARM64 ( HReg );
46
47extern HReg hregARM64_X0 ( void );
48extern HReg hregARM64_X1 ( void );
49extern HReg hregARM64_X2 ( void );
50extern HReg hregARM64_X3 ( void );
51extern HReg hregARM64_X4 ( void );
52extern HReg hregARM64_X5 ( void );
53extern HReg hregARM64_X6 ( void );
54extern HReg hregARM64_X7 ( void );
sewardjbbcf1882014-01-12 12:49:10 +000055extern HReg hregARM64_X9 ( void );
56extern HReg hregARM64_X10 ( void );
57extern HReg hregARM64_X11 ( void );
58extern HReg hregARM64_X12 ( void );
59extern HReg hregARM64_X13 ( void );
60extern HReg hregARM64_X14 ( void );
61extern HReg hregARM64_X15 ( void );
62extern HReg hregARM64_X21 ( void );
63extern HReg hregARM64_X22 ( void );
64extern HReg hregARM64_X23 ( void );
65extern HReg hregARM64_X24 ( void );
66extern HReg hregARM64_X25 ( void );
67extern HReg hregARM64_X26 ( void );
68extern HReg hregARM64_X27 ( void );
69extern HReg hregARM64_X28 ( void );
70extern HReg hregARM64_D8 ( void );
71extern HReg hregARM64_D9 ( void );
72extern HReg hregARM64_D10 ( void );
73extern HReg hregARM64_D11 ( void );
74extern HReg hregARM64_D12 ( void );
75extern HReg hregARM64_D13 ( void );
76extern HReg hregARM64_Q16 ( void );
77extern HReg hregARM64_Q17 ( void );
78extern HReg hregARM64_Q18 ( void );
sewardj76ac4762014-06-20 08:30:21 +000079extern HReg hregARM64_Q19 ( void );
80extern HReg hregARM64_Q20 ( void );
sewardjbbcf1882014-01-12 12:49:10 +000081
82/* Number of registers used arg passing in function calls */
83#define ARM64_N_ARGREGS 8 /* x0 .. x7 */
84
85
86/* --------- Condition codes. --------- */
87
88typedef
89 enum {
90 ARM64cc_EQ = 0, /* equal : Z=1 */
91 ARM64cc_NE = 1, /* not equal : Z=0 */
92
93 ARM64cc_CS = 2, /* >=u (higher or same) : C=1 */
94 ARM64cc_CC = 3, /* <u (lower) : C=0 */
95
96 ARM64cc_MI = 4, /* minus (negative) : N=1 */
97 ARM64cc_PL = 5, /* plus (zero or +ve) : N=0 */
98
99 ARM64cc_VS = 6, /* overflow : V=1 */
100 ARM64cc_VC = 7, /* no overflow : V=0 */
101
102 ARM64cc_HI = 8, /* >u (higher) : C=1 && Z=0 */
103 ARM64cc_LS = 9, /* <=u (lower or same) : !(C=1 && Z=0) */
104
105 ARM64cc_GE = 10, /* >=s (signed greater or equal) : N=V */
106 ARM64cc_LT = 11, /* <s (signed less than) : !(N=V) */
107
108 ARM64cc_GT = 12, /* >s (signed greater) : Z=0 && N=V */
109 ARM64cc_LE = 13, /* <=s (signed less or equal) : !(Z=0 && N=V) */
110
111 ARM64cc_AL = 14, /* always (unconditional) */
112 ARM64cc_NV = 15 /* in 64-bit mode also means "always" */
113 }
114 ARM64CondCode;
115
116
117/* --------- Memory address expressions (amodes). --------- */
118
119typedef
120 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000121 ARM64am_RI9=10, /* reg + simm9 */
sewardjbbcf1882014-01-12 12:49:10 +0000122 ARM64am_RI12, /* reg + uimm12 * szB (iow, scaled by access size) */
123 ARM64am_RR /* reg1 + reg2 */
124 }
125 ARM64AModeTag;
126
127typedef
128 struct {
129 ARM64AModeTag tag;
130 union {
131 struct {
132 HReg reg;
133 Int simm9; /* -256 .. +255 */
134 } RI9;
135 struct {
136 HReg reg;
137 UInt uimm12; /* 0 .. 4095 */
138 UChar szB; /* 1, 2, 4, 8 (16 ?) */
139 } RI12;
140 struct {
141 HReg base;
142 HReg index;
143 } RR;
144 } ARM64am;
145 }
146 ARM64AMode;
147
148extern ARM64AMode* ARM64AMode_RI9 ( HReg reg, Int simm9 );
149extern ARM64AMode* ARM64AMode_RI12 ( HReg reg, Int uimm12, UChar szB );
150extern ARM64AMode* ARM64AMode_RR ( HReg base, HReg index );
151
152
153/* --------- Reg or uimm12 or (uimm12 << 12) operands --------- */
154
155typedef
156 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000157 ARM64riA_I12=20, /* uimm12 << 0 or 12 only */
158 ARM64riA_R /* reg */
sewardjbbcf1882014-01-12 12:49:10 +0000159 }
160 ARM64RIATag;
161
162typedef
163 struct {
164 ARM64RIATag tag;
165 union {
166 struct {
167 UShort imm12; /* 0 .. 4095 */
168 UChar shift; /* 0 or 12 only */
169 } I12;
170 struct {
171 HReg reg;
172 } R;
173 } ARM64riA;
174 }
175 ARM64RIA;
176
177extern ARM64RIA* ARM64RIA_I12 ( UShort imm12, UChar shift );
178extern ARM64RIA* ARM64RIA_R ( HReg );
179
180
181/* --------- Reg or "bitfield" (logic immediate) operands --------- */
182
183typedef
184 enum {
185 ARM64riL_I13=6, /* wierd-o bitfield immediate, 13 bits in total */
186 ARM64riL_R /* reg */
187 }
188 ARM64RILTag;
189
190typedef
191 struct {
192 ARM64RILTag tag;
193 union {
194 struct {
195 UChar bitN; /* 0 .. 1 */
196 UChar immR; /* 0 .. 63 */
197 UChar immS; /* 0 .. 63 */
198 } I13;
199 struct {
200 HReg reg;
201 } R;
202 } ARM64riL;
203 }
204 ARM64RIL;
205
206extern ARM64RIL* ARM64RIL_I13 ( UChar bitN, UChar immR, UChar immS );
207extern ARM64RIL* ARM64RIL_R ( HReg );
208
209
210/* --------------- Reg or uimm6 operands --------------- */
211
212typedef
213 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000214 ARM64ri6_I6=30, /* uimm6, 1 .. 63 only */
sewardjbbcf1882014-01-12 12:49:10 +0000215 ARM64ri6_R /* reg */
216 }
217 ARM64RI6Tag;
218
219typedef
220 struct {
221 ARM64RI6Tag tag;
222 union {
223 struct {
224 UInt imm6; /* 1 .. 63 */
225 } I6;
226 struct {
227 HReg reg;
228 } R;
229 } ARM64ri6;
230 }
231 ARM64RI6;
232
233extern ARM64RI6* ARM64RI6_I6 ( UInt imm6 );
234extern ARM64RI6* ARM64RI6_R ( HReg );
235
236
237/* --------------------- Instructions --------------------- */
238
239typedef
240 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000241 ARM64lo_AND=40,
sewardjbbcf1882014-01-12 12:49:10 +0000242 ARM64lo_OR,
243 ARM64lo_XOR
244 }
245 ARM64LogicOp;
246
247typedef
248 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000249 ARM64sh_SHL=50,
sewardjbbcf1882014-01-12 12:49:10 +0000250 ARM64sh_SHR,
251 ARM64sh_SAR
252 }
253 ARM64ShiftOp;
254
255typedef
256 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000257 ARM64un_NEG=60,
sewardjbbcf1882014-01-12 12:49:10 +0000258 ARM64un_NOT,
259 ARM64un_CLZ,
260 }
261 ARM64UnaryOp;
262
263typedef
264 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000265 ARM64mul_PLAIN=70, /* lo64(64 * 64) */
sewardjbbcf1882014-01-12 12:49:10 +0000266 ARM64mul_ZX, /* hi64(64 *u 64) */
267 ARM64mul_SX /* hi64(64 *s 64) */
268 }
269 ARM64MulOp;
270
271typedef
272 /* These characterise an integer-FP conversion, but don't imply any
273 particular direction. */
274 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000275 ARM64cvt_F32_I32S=80,
sewardjbbcf1882014-01-12 12:49:10 +0000276 ARM64cvt_F64_I32S,
277 ARM64cvt_F32_I64S,
278 ARM64cvt_F64_I64S,
279 ARM64cvt_F32_I32U,
280 ARM64cvt_F64_I32U,
281 ARM64cvt_F32_I64U,
282 ARM64cvt_F64_I64U,
283 ARM64cvt_INVALID
284 }
285 ARM64CvtOp;
286
287typedef
288 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000289 ARM64fpb_ADD=100,
sewardjbbcf1882014-01-12 12:49:10 +0000290 ARM64fpb_SUB,
291 ARM64fpb_MUL,
292 ARM64fpb_DIV,
293 ARM64fpb_INVALID
294 }
295 ARM64FpBinOp;
296
297typedef
298 enum {
sewardj606c4ba2014-01-26 19:11:14 +0000299 ARM64fpu_NEG=110,
sewardjbbcf1882014-01-12 12:49:10 +0000300 ARM64fpu_ABS,
301 ARM64fpu_SQRT,
302 ARM64fpu_RINT,
303 ARM64fpu_INVALID
304 }
305 ARM64FpUnaryOp;
306
sewardj606c4ba2014-01-26 19:11:14 +0000307typedef
308 enum {
sewardj25523c42014-06-15 19:36:29 +0000309 ARM64vecb_ADD64x2=120, ARM64vecb_ADD32x4,
310 ARM64vecb_ADD16x8, ARM64vecb_ADD8x16,
311 ARM64vecb_SUB64x2, ARM64vecb_SUB32x4,
312 ARM64vecb_SUB16x8, ARM64vecb_SUB8x16,
313 ARM64vecb_MUL32x4,
314 ARM64vecb_MUL16x8, ARM64vecb_MUL8x16,
315 ARM64vecb_FADD64x2, ARM64vecb_FADD32x4,
316 ARM64vecb_FSUB64x2, ARM64vecb_FSUB32x4,
317 ARM64vecb_FMUL64x2, ARM64vecb_FMUL32x4,
318 ARM64vecb_FDIV64x2, ARM64vecb_FDIV32x4,
319 ARM64vecb_UMAX32x4,
320 ARM64vecb_UMAX16x8, ARM64vecb_UMAX8x16,
321 ARM64vecb_UMIN32x4,
322 ARM64vecb_UMIN16x8, ARM64vecb_UMIN8x16,
323 ARM64vecb_SMAX32x4,
324 ARM64vecb_SMAX16x8, ARM64vecb_SMAX8x16,
325 ARM64vecb_SMIN32x4,
326 ARM64vecb_SMIN16x8, ARM64vecb_SMIN8x16,
sewardjecde6972014-02-05 11:01:19 +0000327 ARM64vecb_AND,
328 ARM64vecb_ORR,
sewardje520bb32014-02-17 11:00:53 +0000329 ARM64vecb_XOR,
sewardj25523c42014-06-15 19:36:29 +0000330 ARM64vecb_CMEQ64x2, ARM64vecb_CMEQ32x4,
331 ARM64vecb_CMEQ16x8, ARM64vecb_CMEQ8x16,
332 ARM64vecb_CMHI64x2, ARM64vecb_CMHI32x4, /* >u */
333 ARM64vecb_CMHI16x8, ARM64vecb_CMHI8x16,
334 ARM64vecb_CMGT64x2, ARM64vecb_CMGT32x4, /* >s */
335 ARM64vecb_CMGT16x8, ARM64vecb_CMGT8x16,
336 ARM64vecb_FCMEQ64x2, ARM64vecb_FCMEQ32x4,
337 ARM64vecb_FCMGE64x2, ARM64vecb_FCMGE32x4,
338 ARM64vecb_FCMGT64x2, ARM64vecb_FCMGT32x4,
sewardj92d0ae32014-04-03 13:48:54 +0000339 ARM64vecb_TBL1,
sewardj25523c42014-06-15 19:36:29 +0000340 ARM64vecb_UZP164x2, ARM64vecb_UZP132x4,
341 ARM64vecb_UZP116x8, ARM64vecb_UZP18x16,
342 ARM64vecb_UZP264x2, ARM64vecb_UZP232x4,
343 ARM64vecb_UZP216x8, ARM64vecb_UZP28x16,
344 ARM64vecb_ZIP132x4, ARM64vecb_ZIP116x8,
345 ARM64vecb_ZIP18x16, ARM64vecb_ZIP232x4,
346 ARM64vecb_ZIP216x8, ARM64vecb_ZIP28x16,
sewardj168c8bd2014-06-25 13:05:23 +0000347 ARM64vecb_PMUL8x16,
sewardj31b5a952014-06-26 07:41:14 +0000348 ARM64vecb_PMULL8x8,
sewardj6f312d02014-06-28 12:21:37 +0000349 ARM64vecb_UMULL2DSS,
350 ARM64vecb_UMULL4SHH, ARM64vecb_UMULL8HBB,
351 ARM64vecb_SMULL2DSS,
352 ARM64vecb_SMULL4SHH, ARM64vecb_SMULL8HBB,
sewardj51d012a2014-07-21 09:19:50 +0000353 ARM64vecb_SQADD64x2, ARM64vecb_SQADD32x4,
354 ARM64vecb_SQADD16x8, ARM64vecb_SQADD8x16,
355 ARM64vecb_UQADD64x2, ARM64vecb_UQADD32x4,
356 ARM64vecb_UQADD16x8, ARM64vecb_UQADD8x16,
357 ARM64vecb_SQSUB64x2, ARM64vecb_SQSUB32x4,
358 ARM64vecb_SQSUB16x8, ARM64vecb_SQSUB8x16,
359 ARM64vecb_UQSUB64x2, ARM64vecb_UQSUB32x4,
360 ARM64vecb_UQSUB16x8, ARM64vecb_UQSUB8x16,
361 ARM64vecb_SQDMULL2DSS,
362 ARM64vecb_SQDMULL4SHH,
sewardj54ffa1d2014-07-22 09:27:49 +0000363 ARM64vecb_SQDMULH32x4,
364 ARM64vecb_SQDMULH16x8,
365 ARM64vecb_SQRDMULH32x4,
366 ARM64vecb_SQRDMULH16x8,
sewardj12972182014-08-04 08:09:47 +0000367 ARM64vecb_SQSHL64x2, ARM64vecb_SQSHL32x4,
368 ARM64vecb_SQSHL16x8, ARM64vecb_SQSHL8x16,
369 ARM64vecb_UQSHL64x2, ARM64vecb_UQSHL32x4,
370 ARM64vecb_UQSHL16x8, ARM64vecb_UQSHL8x16,
371 ARM64vecb_SQRSHL64x2, ARM64vecb_SQRSHL32x4,
372 ARM64vecb_SQRSHL16x8, ARM64vecb_SQRSHL8x16,
373 ARM64vecb_UQRSHL64x2, ARM64vecb_UQRSHL32x4,
374 ARM64vecb_UQRSHL16x8, ARM64vecb_UQRSHL8x16,
sewardj606c4ba2014-01-26 19:11:14 +0000375 ARM64vecb_INVALID
376 }
377 ARM64VecBinOp;
378
sewardjfab09142014-02-10 10:28:13 +0000379typedef
380 enum {
sewardj25523c42014-06-15 19:36:29 +0000381 ARM64vecu_FNEG64x2=300, ARM64vecu_FNEG32x4,
382 ARM64vecu_FABS64x2, ARM64vecu_FABS32x4,
sewardje520bb32014-02-17 11:00:53 +0000383 ARM64vecu_NOT,
sewardj25523c42014-06-15 19:36:29 +0000384 ARM64vecu_ABS64x2, ARM64vecu_ABS32x4,
385 ARM64vecu_ABS16x8, ARM64vecu_ABS8x16,
sewardj2b6fd5e2014-06-19 14:21:37 +0000386 ARM64vecu_CLS32x4, ARM64vecu_CLS16x8, ARM64vecu_CLS8x16,
387 ARM64vecu_CLZ32x4, ARM64vecu_CLZ16x8, ARM64vecu_CLZ8x16,
388 ARM64vecu_CNT8x16,
sewardj715d1622014-06-26 12:39:05 +0000389 ARM64vecu_RBIT,
390 ARM64vecu_REV1616B,
sewardjdf9d6d52014-06-27 10:43:22 +0000391 ARM64vecu_REV3216B, ARM64vecu_REV328H,
392 ARM64vecu_REV6416B, ARM64vecu_REV648H, ARM64vecu_REV644S,
sewardjfab09142014-02-10 10:28:13 +0000393 ARM64vecu_INVALID
394 }
395 ARM64VecUnaryOp;
396
sewardje520bb32014-02-17 11:00:53 +0000397typedef
398 enum {
sewardj25523c42014-06-15 19:36:29 +0000399 ARM64vecsh_USHR64x2=350, ARM64vecsh_USHR32x4,
400 ARM64vecsh_USHR16x8, ARM64vecsh_USHR8x16,
401 ARM64vecsh_SSHR64x2, ARM64vecsh_SSHR32x4,
402 ARM64vecsh_SSHR16x8, ARM64vecsh_SSHR8x16,
403 ARM64vecsh_SHL64x2, ARM64vecsh_SHL32x4,
404 ARM64vecsh_SHL16x8, ARM64vecsh_SHL8x16,
sewardje520bb32014-02-17 11:00:53 +0000405 ARM64vecsh_INVALID
406 }
407 ARM64VecShiftOp;
408
sewardjbbcf1882014-01-12 12:49:10 +0000409typedef
410 enum {
411 /* baseline */
412 ARM64in_Arith=1220,
413 ARM64in_Cmp,
414 ARM64in_Logic,
415 ARM64in_Test,
416 ARM64in_Shift,
417 ARM64in_Unary,
418 ARM64in_MovI, /* int reg-reg move */
419 ARM64in_Imm64,
420 ARM64in_LdSt64,
421 ARM64in_LdSt32, /* w/ ZX loads */
422 ARM64in_LdSt16, /* w/ ZX loads */
423 ARM64in_LdSt8, /* w/ ZX loads */
424 ARM64in_XDirect, /* direct transfer to GA */
425 ARM64in_XIndir, /* indirect transfer to GA */
426 ARM64in_XAssisted, /* assisted transfer to GA */
427 ARM64in_CSel,
428 ARM64in_Call,
429 ARM64in_AddToSP, /* move SP by small, signed constant */
430 ARM64in_FromSP, /* move SP to integer register */
431 ARM64in_Mul,
sewardj7d009132014-02-20 17:43:38 +0000432 ARM64in_LdrEX,
433 ARM64in_StrEX,
434 ARM64in_MFence,
sewardj606c4ba2014-01-26 19:11:14 +0000435 /* ARM64in_V*: scalar ops involving vector registers */
sewardjbbcf1882014-01-12 12:49:10 +0000436 ARM64in_VLdStS, /* 32-bit FP load/store, with imm offset */
437 ARM64in_VLdStD, /* 64-bit FP load/store, with imm offset */
438 ARM64in_VLdStQ,
439 ARM64in_VCvtI2F,
440 ARM64in_VCvtF2I,
441 ARM64in_VCvtSD,
442 ARM64in_VUnaryD,
443 ARM64in_VUnaryS,
444 ARM64in_VBinD,
445 ARM64in_VBinS,
446 ARM64in_VCmpD,
447 ARM64in_VCmpS,
448 ARM64in_FPCR,
sewardj12972182014-08-04 08:09:47 +0000449 ARM64in_FPSR,
sewardj606c4ba2014-01-26 19:11:14 +0000450 /* ARM64in_V*V: vector ops on vector registers */
451 ARM64in_VBinV,
sewardjfab09142014-02-10 10:28:13 +0000452 ARM64in_VUnaryV,
sewardj606c4ba2014-01-26 19:11:14 +0000453 ARM64in_VNarrowV,
sewardje520bb32014-02-17 11:00:53 +0000454 ARM64in_VShiftImmV,
sewardjab33a7a2014-06-19 22:20:47 +0000455 ARM64in_VExtV,
sewardjbbcf1882014-01-12 12:49:10 +0000456 ARM64in_VImmQ,
457 ARM64in_VDfromX, /* Move an Xreg to a Dreg */
sewardj12972182014-08-04 08:09:47 +0000458 ARM64in_VQfromX, /* Move an Xreg to a Qreg lo64, and zero hi64 */
sewardjbbcf1882014-01-12 12:49:10 +0000459 ARM64in_VQfromXX, /* Move 2 Xregs to a Qreg */
460 ARM64in_VXfromQ, /* Move half a Qreg to an Xreg */
sewardj85fbb022014-06-12 13:16:01 +0000461 ARM64in_VXfromDorS, /* Move Dreg or Sreg(ZX) to an Xreg */
sewardjbbcf1882014-01-12 12:49:10 +0000462 ARM64in_VMov, /* vector reg-reg move, 16, 8 or 4 bytes */
463 /* infrastructure */
464 ARM64in_EvCheck, /* Event check */
465//ZZ ARMin_ProfInc /* 64-bit profile counter increment */
466 }
467 ARM64InstrTag;
468
469/* Destinations are on the LEFT (first operand) */
470
471typedef
472 struct {
473 ARM64InstrTag tag;
474 union {
475 /* --- INTEGER INSTRUCTIONS --- */
476 /* 64 bit ADD/SUB reg, reg or uimm12<<{0,12} */
477 struct {
478 HReg dst;
479 HReg argL;
480 ARM64RIA* argR;
481 Bool isAdd;
482 } Arith;
483 /* 64 or 32 bit CMP reg, reg or aimm (SUB and set flags) */
484 struct {
485 HReg argL;
486 ARM64RIA* argR;
487 Bool is64;
488 } Cmp;
489 /* 64 bit AND/OR/XOR reg, reg or bitfield-immediate */
490 struct {
491 HReg dst;
492 HReg argL;
493 ARM64RIL* argR;
494 ARM64LogicOp op;
495 } Logic;
496 /* 64 bit TST reg, reg or bimm (AND and set flags) */
497 struct {
498 HReg argL;
499 ARM64RIL* argR;
500 } Test;
501 /* 64 bit SHL/SHR/SAR, 2nd arg is reg or imm */
502 struct {
503 HReg dst;
504 HReg argL;
505 ARM64RI6* argR;
506 ARM64ShiftOp op;
507 } Shift;
508 /* NOT/NEG/CLZ, 64 bit only */
509 struct {
510 HReg dst;
511 HReg src;
512 ARM64UnaryOp op;
513 } Unary;
514 /* MOV dst, src -- reg-reg move for integer registers */
515 struct {
516 HReg dst;
517 HReg src;
518 } MovI;
519 /* Pseudo-insn; make a 64-bit immediate */
520 struct {
521 HReg dst;
522 ULong imm64;
523 } Imm64;
524 /* 64-bit load or store */
525 struct {
526 Bool isLoad;
527 HReg rD;
528 ARM64AMode* amode;
529 } LdSt64;
530 /* zx-32-to-64-bit load, or 32-bit store */
531 struct {
532 Bool isLoad;
533 HReg rD;
534 ARM64AMode* amode;
535 } LdSt32;
536 /* zx-16-to-64-bit load, or 16-bit store */
537 struct {
538 Bool isLoad;
539 HReg rD;
540 ARM64AMode* amode;
541 } LdSt16;
542 /* zx-8-to-64-bit load, or 8-bit store */
543 struct {
544 Bool isLoad;
545 HReg rD;
546 ARM64AMode* amode;
547 } LdSt8;
548 /* Update the guest PC value, then exit requesting to chain
549 to it. May be conditional. Urr, use of Addr64 implicitly
550 assumes that wordsize(guest) == wordsize(host). */
551 struct {
552 Addr64 dstGA; /* next guest address */
553 ARM64AMode* amPC; /* amode in guest state for PC */
554 ARM64CondCode cond; /* can be ARM64cc_AL */
555 Bool toFastEP; /* chain to the slow or fast point? */
556 } XDirect;
557 /* Boring transfer to a guest address not known at JIT time.
558 Not chainable. May be conditional. */
559 struct {
560 HReg dstGA;
561 ARM64AMode* amPC;
562 ARM64CondCode cond; /* can be ARM64cc_AL */
563 } XIndir;
564 /* Assisted transfer to a guest address, most general case.
565 Not chainable. May be conditional. */
566 struct {
567 HReg dstGA;
568 ARM64AMode* amPC;
569 ARM64CondCode cond; /* can be ARM64cc_AL */
570 IRJumpKind jk;
571 } XAssisted;
572 /* CSEL: dst = if cond then argL else argR. cond may be anything. */
573 struct {
574 HReg dst;
575 HReg argL;
576 HReg argR;
577 ARM64CondCode cond;
578 } CSel;
579 /* Pseudo-insn. Call target (an absolute address), on given
580 condition (which could be ARM64cc_AL). */
581 struct {
582 RetLoc rloc; /* where the return value will be */
583 HWord target;
584 ARM64CondCode cond;
585 Int nArgRegs; /* # regs carrying args: 0 .. 8 */
586 } Call;
587 /* move SP by small, signed constant */
588 struct {
589 Int simm; /* needs to be 0 % 16 and in the range -4095
590 .. 4095 inclusive */
591 } AddToSP;
592 /* move SP to integer register */
593 struct {
594 HReg dst;
595 } FromSP;
596 /* Integer multiply, with 3 variants:
597 (PLAIN) lo64(64 * 64)
598 (ZX) hi64(64 *u 64)
599 (SX) hi64(64 *s 64)
600 */
601 struct {
602 HReg dst;
603 HReg argL;
604 HReg argR;
605 ARM64MulOp op;
606 } Mul;
sewardj7d009132014-02-20 17:43:38 +0000607 /* LDXR{,H,B} x2, [x4] */
608 struct {
609 Int szB; /* 1, 2, 4 or 8 */
610 } LdrEX;
611 /* STXR{,H,B} w0, x2, [x4] */
612 struct {
613 Int szB; /* 1, 2, 4 or 8 */
614 } StrEX;
615 /* Mem fence. An insn which fences all loads and stores as
616 much as possible before continuing. On ARM64 we emit the
617 sequence "dsb sy ; dmb sy ; isb sy", which is probably
618 total nuclear overkill, but better safe than sorry. */
619 struct {
620 } MFence;
sewardjbbcf1882014-01-12 12:49:10 +0000621 /* --- INSTRUCTIONS INVOLVING VECTOR REGISTERS --- */
622 /* 32-bit Fp load/store */
623 struct {
624 Bool isLoad;
625 HReg sD;
626 HReg rN;
627 UInt uimm12; /* 0 .. 16380 inclusive, 0 % 4 */
628 } VLdStS;
629 /* 64-bit Fp load/store */
630 struct {
631 Bool isLoad;
632 HReg dD;
633 HReg rN;
634 UInt uimm12; /* 0 .. 32760 inclusive, 0 % 8 */
635 } VLdStD;
636 /* 128-bit Vector load/store. */
637 struct {
638 Bool isLoad;
639 HReg rQ; // data
640 HReg rN; // address
641 } VLdStQ;
642 /* Scalar conversion of int to float. */
643 struct {
644 ARM64CvtOp how;
645 HReg rD; // dst, a D or S register
646 HReg rS; // src, a W or X register
647 } VCvtI2F;
648 /* Scalar conversion of float to int, w/ specified RM. */
649 struct {
650 ARM64CvtOp how;
651 HReg rD; // dst, a W or X register
652 HReg rS; // src, a D or S register
653 UChar armRM; // ARM encoded RM:
654 // 00=nearest, 01=+inf, 10=-inf, 11=zero
655 } VCvtF2I;
656 /* Convert between 32-bit and 64-bit FP values (both
657 ways). (FCVT) */
658 struct {
659 Bool sToD; /* True: F32->F64. False: F64->F32 */
660 HReg dst;
661 HReg src;
662 } VCvtSD;
663 /* 64-bit FP unary */
664 struct {
665 ARM64FpUnaryOp op;
666 HReg dst;
667 HReg src;
668 } VUnaryD;
669 /* 32-bit FP unary */
670 struct {
671 ARM64FpUnaryOp op;
672 HReg dst;
673 HReg src;
674 } VUnaryS;
675 /* 64-bit FP binary arithmetic */
676 struct {
677 ARM64FpBinOp op;
678 HReg dst;
679 HReg argL;
680 HReg argR;
681 } VBinD;
682 /* 32-bit FP binary arithmetic */
683 struct {
684 ARM64FpBinOp op;
685 HReg dst;
686 HReg argL;
687 HReg argR;
688 } VBinS;
689 /* 64-bit FP compare */
690 struct {
691 HReg argL;
692 HReg argR;
693 } VCmpD;
694 /* 32-bit FP compare */
695 struct {
696 HReg argL;
697 HReg argR;
698 } VCmpS;
699 /* Move a 32-bit value to/from the FPCR */
700 struct {
701 Bool toFPCR;
702 HReg iReg;
703 } FPCR;
sewardj12972182014-08-04 08:09:47 +0000704 /* Move a 32-bit value to/from the FPSR */
705 struct {
706 Bool toFPSR;
707 HReg iReg;
708 } FPSR;
sewardj606c4ba2014-01-26 19:11:14 +0000709 /* binary vector operation on vector registers */
710 struct {
711 ARM64VecBinOp op;
712 HReg dst;
713 HReg argL;
714 HReg argR;
715 } VBinV;
sewardjfab09142014-02-10 10:28:13 +0000716 /* unary vector operation on vector registers */
717 struct {
718 ARM64VecUnaryOp op;
719 HReg dst;
720 HReg arg;
721 } VUnaryV;
sewardj606c4ba2014-01-26 19:11:14 +0000722 /* vector narrowing, Q -> Q. Result goes in the bottom half
723 of dst and the top half is zeroed out. Iow is XTN. */
724 struct {
725 UInt dszBlg2; // 0: 16to8_x8 1: 32to16_x4 2: 64to32_x2
726 HReg dst; // Q reg
727 HReg src; // Q reg
728 } VNarrowV;
sewardje520bb32014-02-17 11:00:53 +0000729 /* Vector shift by immediate. |amt| needs to be > 0 and <
730 implied lane size of |op|. Zero shifts and out of range
731 shifts are not allowed. */
732 struct {
733 ARM64VecShiftOp op;
734 HReg dst;
735 HReg src;
736 UInt amt;
737 } VShiftImmV;
sewardjab33a7a2014-06-19 22:20:47 +0000738 struct {
739 HReg dst;
740 HReg srcLo;
741 HReg srcHi;
742 UInt amtB;
743 } VExtV;
sewardjbbcf1882014-01-12 12:49:10 +0000744 struct {
745 HReg rQ;
746 UShort imm; /* Same 1-bit-per-byte encoding as IR */
747 } VImmQ;
748 struct {
749 HReg rD;
750 HReg rX;
751 } VDfromX;
752 struct {
753 HReg rQ;
sewardj12972182014-08-04 08:09:47 +0000754 HReg rXlo;
755 } VQfromX;
756 struct {
757 HReg rQ;
sewardjbbcf1882014-01-12 12:49:10 +0000758 HReg rXhi;
759 HReg rXlo;
760 } VQfromXX;
761 struct {
762 HReg rX;
763 HReg rQ;
764 UInt laneNo; /* either 0 or 1 */
765 } VXfromQ;
sewardj85fbb022014-06-12 13:16:01 +0000766 struct {
767 HReg rX;
768 HReg rDorS;
769 Bool fromD;
770 } VXfromDorS;
sewardjbbcf1882014-01-12 12:49:10 +0000771 /* MOV dst, src -- reg-reg move for vector registers */
772 struct {
773 UInt szB; // 16=mov qD,qS; 8=mov dD,dS; 4=mov sD,sS
774 HReg dst;
775 HReg src;
776 } VMov;
777 struct {
778 ARM64AMode* amCounter;
779 ARM64AMode* amFailAddr;
780 } EvCheck;
781//ZZ struct {
782//ZZ /* No fields. The address of the counter to inc is
783//ZZ installed later, post-translation, by patching it in,
784//ZZ as it is not known at translation time. */
785//ZZ } ProfInc;
786 } ARM64in;
787 }
788 ARM64Instr;
789
sewardj633d9db2014-06-25 12:19:02 +0000790
sewardjbbcf1882014-01-12 12:49:10 +0000791extern ARM64Instr* ARM64Instr_Arith ( HReg, HReg, ARM64RIA*, Bool isAdd );
792extern ARM64Instr* ARM64Instr_Cmp ( HReg, ARM64RIA*, Bool is64 );
793extern ARM64Instr* ARM64Instr_Logic ( HReg, HReg, ARM64RIL*, ARM64LogicOp );
794extern ARM64Instr* ARM64Instr_Test ( HReg, ARM64RIL* );
795extern ARM64Instr* ARM64Instr_Shift ( HReg, HReg, ARM64RI6*, ARM64ShiftOp );
796extern ARM64Instr* ARM64Instr_Unary ( HReg, HReg, ARM64UnaryOp );
sewardjbbcf1882014-01-12 12:49:10 +0000797extern ARM64Instr* ARM64Instr_MovI ( HReg, HReg );
798extern ARM64Instr* ARM64Instr_Imm64 ( HReg, ULong );
799extern ARM64Instr* ARM64Instr_LdSt64 ( Bool isLoad, HReg, ARM64AMode* );
800extern ARM64Instr* ARM64Instr_LdSt32 ( Bool isLoad, HReg, ARM64AMode* );
801extern ARM64Instr* ARM64Instr_LdSt16 ( Bool isLoad, HReg, ARM64AMode* );
802extern ARM64Instr* ARM64Instr_LdSt8 ( Bool isLoad, HReg, ARM64AMode* );
sewardjbbcf1882014-01-12 12:49:10 +0000803extern ARM64Instr* ARM64Instr_XDirect ( Addr64 dstGA, ARM64AMode* amPC,
804 ARM64CondCode cond, Bool toFastEP );
805extern ARM64Instr* ARM64Instr_XIndir ( HReg dstGA, ARM64AMode* amPC,
806 ARM64CondCode cond );
807extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
808 ARM64CondCode cond, IRJumpKind jk );
809extern ARM64Instr* ARM64Instr_CSel ( HReg dst, HReg argL, HReg argR,
810 ARM64CondCode cond );
811extern ARM64Instr* ARM64Instr_Call ( ARM64CondCode, HWord, Int nArgRegs,
812 RetLoc rloc );
813extern ARM64Instr* ARM64Instr_AddToSP ( Int simm );
814extern ARM64Instr* ARM64Instr_FromSP ( HReg dst );
815extern ARM64Instr* ARM64Instr_Mul ( HReg dst, HReg argL, HReg argR,
816 ARM64MulOp op );
sewardj7d009132014-02-20 17:43:38 +0000817extern ARM64Instr* ARM64Instr_LdrEX ( Int szB );
818extern ARM64Instr* ARM64Instr_StrEX ( Int szB );
819extern ARM64Instr* ARM64Instr_MFence ( void );
sewardjbbcf1882014-01-12 12:49:10 +0000820extern ARM64Instr* ARM64Instr_VLdStS ( Bool isLoad, HReg sD, HReg rN,
821 UInt uimm12 /* 0 .. 16380, 0 % 4 */ );
822extern ARM64Instr* ARM64Instr_VLdStD ( Bool isLoad, HReg dD, HReg rN,
823 UInt uimm12 /* 0 .. 32760, 0 % 8 */ );
824extern ARM64Instr* ARM64Instr_VLdStQ ( Bool isLoad, HReg rQ, HReg rN );
825extern ARM64Instr* ARM64Instr_VCvtI2F ( ARM64CvtOp how, HReg rD, HReg rS );
826extern ARM64Instr* ARM64Instr_VCvtF2I ( ARM64CvtOp how, HReg rD, HReg rS,
827 UChar armRM );
828extern ARM64Instr* ARM64Instr_VCvtSD ( Bool sToD, HReg dst, HReg src );
829extern ARM64Instr* ARM64Instr_VUnaryD ( ARM64FpUnaryOp op, HReg dst, HReg src );
830extern ARM64Instr* ARM64Instr_VUnaryS ( ARM64FpUnaryOp op, HReg dst, HReg src );
831extern ARM64Instr* ARM64Instr_VBinD ( ARM64FpBinOp op, HReg, HReg, HReg );
832extern ARM64Instr* ARM64Instr_VBinS ( ARM64FpBinOp op, HReg, HReg, HReg );
833extern ARM64Instr* ARM64Instr_VCmpD ( HReg argL, HReg argR );
834extern ARM64Instr* ARM64Instr_VCmpS ( HReg argL, HReg argR );
835extern ARM64Instr* ARM64Instr_FPCR ( Bool toFPCR, HReg iReg );
sewardj12972182014-08-04 08:09:47 +0000836extern ARM64Instr* ARM64Instr_FPSR ( Bool toFPSR, HReg iReg );
sewardj606c4ba2014-01-26 19:11:14 +0000837extern ARM64Instr* ARM64Instr_VBinV ( ARM64VecBinOp op, HReg, HReg, HReg );
sewardjfab09142014-02-10 10:28:13 +0000838extern ARM64Instr* ARM64Instr_VUnaryV ( ARM64VecUnaryOp op, HReg, HReg );
sewardj606c4ba2014-01-26 19:11:14 +0000839extern ARM64Instr* ARM64Instr_VNarrowV ( UInt dszBlg2, HReg dst, HReg src );
sewardje520bb32014-02-17 11:00:53 +0000840extern ARM64Instr* ARM64Instr_VShiftImmV ( ARM64VecShiftOp op,
841 HReg dst, HReg src, UInt amt );
sewardjab33a7a2014-06-19 22:20:47 +0000842extern ARM64Instr* ARM64Instr_VExtV ( HReg dst,
843 HReg srcLo, HReg srcHi, UInt amtB );
sewardjbbcf1882014-01-12 12:49:10 +0000844extern ARM64Instr* ARM64Instr_VImmQ ( HReg, UShort );
845extern ARM64Instr* ARM64Instr_VDfromX ( HReg rD, HReg rX );
sewardj12972182014-08-04 08:09:47 +0000846extern ARM64Instr* ARM64Instr_VQfromX ( HReg rQ, HReg rXlo );
sewardjbbcf1882014-01-12 12:49:10 +0000847extern ARM64Instr* ARM64Instr_VQfromXX( HReg rQ, HReg rXhi, HReg rXlo );
848extern ARM64Instr* ARM64Instr_VXfromQ ( HReg rX, HReg rQ, UInt laneNo );
sewardj85fbb022014-06-12 13:16:01 +0000849extern ARM64Instr* ARM64Instr_VXfromDorS ( HReg rX, HReg rDorS, Bool fromD );
sewardjbbcf1882014-01-12 12:49:10 +0000850extern ARM64Instr* ARM64Instr_VMov ( UInt szB, HReg dst, HReg src );
851
852extern ARM64Instr* ARM64Instr_EvCheck ( ARM64AMode* amCounter,
853 ARM64AMode* amFailAddr );
854//ZZ extern ARMInstr* ARMInstr_ProfInc ( void );
855
856extern void ppARM64Instr ( ARM64Instr* );
857
858
859/* Some functions that insulate the register allocator from details
860 of the underlying instruction set. */
861extern void getRegUsage_ARM64Instr ( HRegUsage*, ARM64Instr*, Bool );
862extern void mapRegs_ARM64Instr ( HRegRemap*, ARM64Instr*, Bool );
863extern Bool isMove_ARM64Instr ( ARM64Instr*, HReg*, HReg* );
864extern Int emit_ARM64Instr ( /*MB_MOD*/Bool* is_profInc,
865 UChar* buf, Int nbuf, ARM64Instr* i,
866 Bool mode64,
sewardj9b769162014-07-24 12:42:03 +0000867 VexEndness endness_host,
sewardjbbcf1882014-01-12 12:49:10 +0000868 void* disp_cp_chain_me_to_slowEP,
869 void* disp_cp_chain_me_to_fastEP,
870 void* disp_cp_xindir,
871 void* disp_cp_xassisted );
872
873extern void genSpill_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
874 HReg rreg, Int offset, Bool );
875extern void genReload_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
876 HReg rreg, Int offset, Bool );
877
878extern void getAllocableRegs_ARM64 ( Int*, HReg** );
879extern HInstrArray* iselSB_ARM64 ( IRSB*,
880 VexArch,
881 VexArchInfo*,
882 VexAbiInfo*,
883 Int offs_Host_EvC_Counter,
884 Int offs_Host_EvC_FailAddr,
885 Bool chainingAllowed,
886 Bool addProfInc,
887 Addr64 max_ga );
888
889/* How big is an event check? This is kind of a kludge because it
890 depends on the offsets of host_EvC_FAILADDR and
891 host_EvC_COUNTER. */
sewardj9b769162014-07-24 12:42:03 +0000892extern Int evCheckSzB_ARM64 ( VexEndness endness_host );
sewardjbbcf1882014-01-12 12:49:10 +0000893
894/* Perform a chaining and unchaining of an XDirect jump. */
sewardj9b769162014-07-24 12:42:03 +0000895extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
896 void* place_to_chain,
sewardjbbcf1882014-01-12 12:49:10 +0000897 void* disp_cp_chain_me_EXPECTED,
898 void* place_to_jump_to );
899
sewardj9b769162014-07-24 12:42:03 +0000900extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
901 void* place_to_unchain,
sewardjc6acaa42014-02-19 17:42:59 +0000902 void* place_to_jump_to_EXPECTED,
903 void* disp_cp_chain_me );
904
sewardjbbcf1882014-01-12 12:49:10 +0000905//ZZ /* Patch the counter location into an existing ProfInc point. */
sewardj9b769162014-07-24 12:42:03 +0000906//ZZ extern VexInvalRange patchProfInc_ARM ( VexEndness endness_host,
907//ZZ void* place_to_patch,
sewardjbbcf1882014-01-12 12:49:10 +0000908//ZZ ULong* location_of_counter );


#endif /* ndef __VEX_HOST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                    host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/