
/*---------------------------------------------------------------*/
/*--- begin                                   host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2013-2013 OpenWorks
      info@open-works.net

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
   02110-1301, USA.

   The GNU General Public License is contained in the file COPYING.
*/

#ifndef __VEX_HOST_ARM64_DEFS_H
#define __VEX_HOST_ARM64_DEFS_H

#include "libvex_basictypes.h"
#include "libvex.h"             // VexArch
#include "host_generic_regs.h"  // HReg


/* --------- Registers. --------- */

/* The usual HReg abstraction.
   There are 31 general purpose regs.
*/

extern void ppHRegARM64 ( HReg );

extern HReg hregARM64_X0  ( void );
extern HReg hregARM64_X1  ( void );
extern HReg hregARM64_X2  ( void );
extern HReg hregARM64_X3  ( void );
extern HReg hregARM64_X4  ( void );
extern HReg hregARM64_X5  ( void );
extern HReg hregARM64_X6  ( void );
extern HReg hregARM64_X7  ( void );
extern HReg hregARM64_X8  ( void );
extern HReg hregARM64_X9  ( void );
extern HReg hregARM64_X10 ( void );
extern HReg hregARM64_X11 ( void );
extern HReg hregARM64_X12 ( void );
extern HReg hregARM64_X13 ( void );
extern HReg hregARM64_X14 ( void );
extern HReg hregARM64_X15 ( void );
extern HReg hregARM64_X21 ( void );
extern HReg hregARM64_X22 ( void );
extern HReg hregARM64_X23 ( void );
extern HReg hregARM64_X24 ( void );
extern HReg hregARM64_X25 ( void );
extern HReg hregARM64_X26 ( void );
extern HReg hregARM64_X27 ( void );
extern HReg hregARM64_X28 ( void );
extern HReg hregARM64_D8  ( void );
extern HReg hregARM64_D9  ( void );
extern HReg hregARM64_D10 ( void );
extern HReg hregARM64_D11 ( void );
extern HReg hregARM64_D12 ( void );
extern HReg hregARM64_D13 ( void );
extern HReg hregARM64_Q16 ( void );
extern HReg hregARM64_Q17 ( void );
extern HReg hregARM64_Q18 ( void );
extern HReg hregARM64_Q19 ( void );
extern HReg hregARM64_Q20 ( void );

/* Number of registers used for arg passing in function calls */
#define ARM64_N_ARGREGS 8   /* x0 .. x7 */


/* --------- Condition codes. --------- */

typedef
   enum {
      ARM64cc_EQ  = 0,  /* equal                         : Z=1 */
      ARM64cc_NE  = 1,  /* not equal                     : Z=0 */

      ARM64cc_CS  = 2,  /* >=u (higher or same)          : C=1 */
      ARM64cc_CC  = 3,  /* <u  (lower)                   : C=0 */

      ARM64cc_MI  = 4,  /* minus (negative)              : N=1 */
      ARM64cc_PL  = 5,  /* plus (zero or +ve)            : N=0 */

      ARM64cc_VS  = 6,  /* overflow                      : V=1 */
      ARM64cc_VC  = 7,  /* no overflow                   : V=0 */

      ARM64cc_HI  = 8,  /* >u  (higher)                  : C=1 && Z=0 */
      ARM64cc_LS  = 9,  /* <=u (lower or same)           : !(C=1 && Z=0) */

      ARM64cc_GE  = 10, /* >=s (signed greater or equal) : N=V */
      ARM64cc_LT  = 11, /* <s  (signed less than)        : !(N=V) */

      ARM64cc_GT  = 12, /* >s  (signed greater)          : Z=0 && N=V */
      ARM64cc_LE  = 13, /* <=s (signed less or equal)    : !(Z=0 && N=V) */

      ARM64cc_AL  = 14, /* always (unconditional) */
      ARM64cc_NV  = 15  /* in 64-bit mode also means "always" */
   }
   ARM64CondCode;


/* --------- Memory address expressions (amodes). --------- */

typedef
   enum {
      ARM64am_RI9=10, /* reg + simm9 */
      ARM64am_RI12,   /* reg + uimm12 * szB (iow, scaled by access size) */
      ARM64am_RR      /* reg1 + reg2 */
   }
   ARM64AModeTag;

typedef
   struct {
      ARM64AModeTag tag;
      union {
         struct {
            HReg reg;
            Int  simm9; /* -256 .. +255 */
         } RI9;
         struct {
            HReg  reg;
            UInt  uimm12; /* 0 .. 4095 */
            UChar szB;    /* 1, 2, 4, 8 (16 ?) */
         } RI12;
         struct {
            HReg base;
            HReg index;
         } RR;
      } ARM64am;
   }
   ARM64AMode;

extern ARM64AMode* ARM64AMode_RI9  ( HReg reg, Int simm9 );
extern ARM64AMode* ARM64AMode_RI12 ( HReg reg, Int uimm12, UChar szB );
extern ARM64AMode* ARM64AMode_RR   ( HReg base, HReg index );
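
/* Example (an illustrative sketch only, not part of the original
   interface): an amode meaning "x21 + 0x20" for an 8-byte access can
   be written either as a byte offset or as a scaled offset, since
   RI12's uimm12 is implicitly multiplied by szB:

      ARM64AMode* amA = ARM64AMode_RI9 ( hregARM64_X21(), 0x20 );
      ARM64AMode* amB = ARM64AMode_RI12( hregARM64_X21(), 4, 8 );

   Both denote the same address; which form a particular load/store
   can actually encode is up to the emitter. */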


/* --------- Reg or uimm12 or (uimm12 << 12) operands --------- */

typedef
   enum {
      ARM64riA_I12=20, /* uimm12 << 0 or 12 only */
      ARM64riA_R       /* reg */
   }
   ARM64RIATag;

typedef
   struct {
      ARM64RIATag tag;
      union {
         struct {
            UShort imm12; /* 0 .. 4095 */
            UChar  shift; /* 0 or 12 only */
         } I12;
         struct {
            HReg reg;
         } R;
      } ARM64riA;
   }
   ARM64RIA;

extern ARM64RIA* ARM64RIA_I12 ( UShort imm12, UChar shift );
extern ARM64RIA* ARM64RIA_R   ( HReg );
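
/* For instance (sketch only): the constant 0x5000 is expressible as
   5 << 12, so it could be handed to a 64-bit ADD/SUB as

      ARM64RIA* ria = ARM64RIA_I12( 5, 12 );

   whereas a constant that fits neither uimm12 form has to be moved
   into a register first and passed as ARM64RIA_R(reg). */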


/* --------- Reg or "bitfield" (logic immediate) operands --------- */

typedef
   enum {
      ARM64riL_I13=6, /* weird-o bitfield immediate, 13 bits in total */
      ARM64riL_R      /* reg */
   }
   ARM64RILTag;

typedef
   struct {
      ARM64RILTag tag;
      union {
         struct {
            UChar bitN; /* 0 .. 1 */
            UChar immR; /* 0 .. 63 */
            UChar immS; /* 0 .. 63 */
         } I13;
         struct {
            HReg reg;
         } R;
      } ARM64riL;
   }
   ARM64RIL;

extern ARM64RIL* ARM64RIL_I13 ( UChar bitN, UChar immR, UChar immS );
extern ARM64RIL* ARM64RIL_R   ( HReg );
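
/* As an illustration (a sketch; this is just the standard AArch64
   "logical immediate" N:immR:immS scheme): the 64-bit constant 0xFF
   is a run of 8 ones with no rotation, so it can be expressed as

      ARM64RIL* ril = ARM64RIL_I13( 1/*bitN*/, 0/*immR*/, 7/*immS*/ );

   Constants that are not encodable this way must be materialised
   into a register and passed as ARM64RIL_R(reg). */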


/* --------------- Reg or uimm6 operands --------------- */

typedef
   enum {
      ARM64ri6_I6=30, /* uimm6, 1 .. 63 only */
      ARM64ri6_R      /* reg */
   }
   ARM64RI6Tag;

typedef
   struct {
      ARM64RI6Tag tag;
      union {
         struct {
            UInt imm6; /* 1 .. 63 */
         } I6;
         struct {
            HReg reg;
         } R;
      } ARM64ri6;
   }
   ARM64RI6;

extern ARM64RI6* ARM64RI6_I6 ( UInt imm6 );
extern ARM64RI6* ARM64RI6_R  ( HReg );
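
/* E.g. (sketch): a constant shift by 3 would be ARM64RI6_I6(3), and a
   shift amount held in a register would be ARM64RI6_R(reg).  Note the
   immediate form cannot express a shift of zero. */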


/* --------------------- Instructions --------------------- */

typedef
   enum {
      ARM64lo_AND=40,
      ARM64lo_OR,
      ARM64lo_XOR
   }
   ARM64LogicOp;

typedef
   enum {
      ARM64sh_SHL=50,
      ARM64sh_SHR,
      ARM64sh_SAR
   }
   ARM64ShiftOp;

typedef
   enum {
      ARM64un_NEG=60,
      ARM64un_NOT,
      ARM64un_CLZ,
   }
   ARM64UnaryOp;

typedef
   enum {
      ARM64mul_PLAIN=70, /* lo64(64 * 64)  */
      ARM64mul_ZX,       /* hi64(64 *u 64) */
      ARM64mul_SX        /* hi64(64 *s 64) */
   }
   ARM64MulOp;

typedef
   /* These characterise an integer-FP conversion, but don't imply any
      particular direction. */
   enum {
      ARM64cvt_F32_I32S=80,
      ARM64cvt_F64_I32S,
      ARM64cvt_F32_I64S,
      ARM64cvt_F64_I64S,
      ARM64cvt_F32_I32U,
      ARM64cvt_F64_I32U,
      ARM64cvt_F32_I64U,
      ARM64cvt_F64_I64U,
      ARM64cvt_INVALID
   }
   ARM64CvtOp;

typedef
   enum {
      ARM64fpb_ADD=100,
      ARM64fpb_SUB,
      ARM64fpb_MUL,
      ARM64fpb_DIV,
      ARM64fpb_INVALID
   }
   ARM64FpBinOp;

typedef
   enum {
      ARM64fpu_NEG=110,
      ARM64fpu_ABS,
      ARM64fpu_SQRT,
      ARM64fpu_RINT,
      ARM64fpu_INVALID
   }
   ARM64FpUnaryOp;

typedef
   enum {
      ARM64vecb_ADD64x2=120, ARM64vecb_ADD32x4,
      ARM64vecb_ADD16x8, ARM64vecb_ADD8x16,
      ARM64vecb_SUB64x2, ARM64vecb_SUB32x4,
      ARM64vecb_SUB16x8, ARM64vecb_SUB8x16,
      ARM64vecb_MUL32x4,
      ARM64vecb_MUL16x8, ARM64vecb_MUL8x16,
      ARM64vecb_FADD64x2, ARM64vecb_FADD32x4,
      ARM64vecb_FSUB64x2, ARM64vecb_FSUB32x4,
      ARM64vecb_FMUL64x2, ARM64vecb_FMUL32x4,
      ARM64vecb_FDIV64x2, ARM64vecb_FDIV32x4,
      ARM64vecb_FMAX64x2, ARM64vecb_FMAX32x4,
      ARM64vecb_FMIN64x2, ARM64vecb_FMIN32x4,
      ARM64vecb_UMAX32x4,
      ARM64vecb_UMAX16x8, ARM64vecb_UMAX8x16,
      ARM64vecb_UMIN32x4,
      ARM64vecb_UMIN16x8, ARM64vecb_UMIN8x16,
      ARM64vecb_SMAX32x4,
      ARM64vecb_SMAX16x8, ARM64vecb_SMAX8x16,
      ARM64vecb_SMIN32x4,
      ARM64vecb_SMIN16x8, ARM64vecb_SMIN8x16,
      ARM64vecb_AND,
      ARM64vecb_ORR,
      ARM64vecb_XOR,
      ARM64vecb_CMEQ64x2, ARM64vecb_CMEQ32x4,
      ARM64vecb_CMEQ16x8, ARM64vecb_CMEQ8x16,
      ARM64vecb_CMHI64x2, ARM64vecb_CMHI32x4, /* >u */
      ARM64vecb_CMHI16x8, ARM64vecb_CMHI8x16,
      ARM64vecb_CMGT64x2, ARM64vecb_CMGT32x4, /* >s */
      ARM64vecb_CMGT16x8, ARM64vecb_CMGT8x16,
      ARM64vecb_FCMEQ64x2, ARM64vecb_FCMEQ32x4,
      ARM64vecb_FCMGE64x2, ARM64vecb_FCMGE32x4,
      ARM64vecb_FCMGT64x2, ARM64vecb_FCMGT32x4,
      ARM64vecb_TBL1,
      ARM64vecb_UZP164x2, ARM64vecb_UZP132x4,
      ARM64vecb_UZP116x8, ARM64vecb_UZP18x16,
      ARM64vecb_UZP264x2, ARM64vecb_UZP232x4,
      ARM64vecb_UZP216x8, ARM64vecb_UZP28x16,
      ARM64vecb_ZIP132x4, ARM64vecb_ZIP116x8,
      ARM64vecb_ZIP18x16, ARM64vecb_ZIP232x4,
      ARM64vecb_ZIP216x8, ARM64vecb_ZIP28x16,
      ARM64vecb_PMUL8x16,
      ARM64vecb_PMULL8x8,
      ARM64vecb_UMULL2DSS,
      ARM64vecb_UMULL4SHH, ARM64vecb_UMULL8HBB,
      ARM64vecb_SMULL2DSS,
      ARM64vecb_SMULL4SHH, ARM64vecb_SMULL8HBB,
      ARM64vecb_SQADD64x2, ARM64vecb_SQADD32x4,
      ARM64vecb_SQADD16x8, ARM64vecb_SQADD8x16,
      ARM64vecb_UQADD64x2, ARM64vecb_UQADD32x4,
      ARM64vecb_UQADD16x8, ARM64vecb_UQADD8x16,
      ARM64vecb_SQSUB64x2, ARM64vecb_SQSUB32x4,
      ARM64vecb_SQSUB16x8, ARM64vecb_SQSUB8x16,
      ARM64vecb_UQSUB64x2, ARM64vecb_UQSUB32x4,
      ARM64vecb_UQSUB16x8, ARM64vecb_UQSUB8x16,
      ARM64vecb_SQDMULL2DSS,
      ARM64vecb_SQDMULL4SHH,
      ARM64vecb_SQDMULH32x4,
      ARM64vecb_SQDMULH16x8,
      ARM64vecb_SQRDMULH32x4,
      ARM64vecb_SQRDMULH16x8,
      ARM64vecb_SQSHL64x2, ARM64vecb_SQSHL32x4,
      ARM64vecb_SQSHL16x8, ARM64vecb_SQSHL8x16,
      ARM64vecb_UQSHL64x2, ARM64vecb_UQSHL32x4,
      ARM64vecb_UQSHL16x8, ARM64vecb_UQSHL8x16,
      ARM64vecb_SQRSHL64x2, ARM64vecb_SQRSHL32x4,
      ARM64vecb_SQRSHL16x8, ARM64vecb_SQRSHL8x16,
      ARM64vecb_UQRSHL64x2, ARM64vecb_UQRSHL32x4,
      ARM64vecb_UQRSHL16x8, ARM64vecb_UQRSHL8x16,
      ARM64vecb_SSHL64x2, ARM64vecb_SSHL32x4,
      ARM64vecb_SSHL16x8, ARM64vecb_SSHL8x16,
      ARM64vecb_USHL64x2, ARM64vecb_USHL32x4,
      ARM64vecb_USHL16x8, ARM64vecb_USHL8x16,
      ARM64vecb_SRSHL64x2, ARM64vecb_SRSHL32x4,
      ARM64vecb_SRSHL16x8, ARM64vecb_SRSHL8x16,
      ARM64vecb_URSHL64x2, ARM64vecb_URSHL32x4,
      ARM64vecb_URSHL16x8, ARM64vecb_URSHL8x16,
      ARM64vecb_INVALID
   }
   ARM64VecBinOp;
389
sewardjfab09142014-02-10 10:28:13 +0000390typedef
391 enum {
sewardjfc261d92014-08-24 20:36:14 +0000392 ARM64vecmo_SUQADD64x2=300, ARM64vecmo_SUQADD32x4,
sewardjf7003bc2014-08-18 12:28:02 +0000393 ARM64vecmo_SUQADD16x8, ARM64vecmo_SUQADD8x16,
394 ARM64vecmo_USQADD64x2, ARM64vecmo_USQADD32x4,
395 ARM64vecmo_USQADD16x8, ARM64vecmo_USQADD8x16,
396 ARM64vecmo_INVALID
397 }
398 ARM64VecModifyOp;
399
400typedef
401 enum {
sewardjfc261d92014-08-24 20:36:14 +0000402 ARM64vecu_FNEG64x2=350, ARM64vecu_FNEG32x4,
sewardj25523c42014-06-15 19:36:29 +0000403 ARM64vecu_FABS64x2, ARM64vecu_FABS32x4,
sewardje520bb32014-02-17 11:00:53 +0000404 ARM64vecu_NOT,
sewardj25523c42014-06-15 19:36:29 +0000405 ARM64vecu_ABS64x2, ARM64vecu_ABS32x4,
406 ARM64vecu_ABS16x8, ARM64vecu_ABS8x16,
sewardj2b6fd5e2014-06-19 14:21:37 +0000407 ARM64vecu_CLS32x4, ARM64vecu_CLS16x8, ARM64vecu_CLS8x16,
408 ARM64vecu_CLZ32x4, ARM64vecu_CLZ16x8, ARM64vecu_CLZ8x16,
409 ARM64vecu_CNT8x16,
sewardj715d1622014-06-26 12:39:05 +0000410 ARM64vecu_RBIT,
411 ARM64vecu_REV1616B,
sewardjdf9d6d52014-06-27 10:43:22 +0000412 ARM64vecu_REV3216B, ARM64vecu_REV328H,
413 ARM64vecu_REV6416B, ARM64vecu_REV648H, ARM64vecu_REV644S,
sewardjfc261d92014-08-24 20:36:14 +0000414 ARM64vecu_URECPE32x4,
415 ARM64vecu_URSQRTE32x4,
sewardjfab09142014-02-10 10:28:13 +0000416 ARM64vecu_INVALID
417 }
418 ARM64VecUnaryOp;
419
sewardje520bb32014-02-17 11:00:53 +0000420typedef
421 enum {
sewardjfc261d92014-08-24 20:36:14 +0000422 ARM64vecshi_USHR64x2=400, ARM64vecshi_USHR32x4,
sewardja6b61f02014-08-17 18:32:14 +0000423 ARM64vecshi_USHR16x8, ARM64vecshi_USHR8x16,
424 ARM64vecshi_SSHR64x2, ARM64vecshi_SSHR32x4,
425 ARM64vecshi_SSHR16x8, ARM64vecshi_SSHR8x16,
426 ARM64vecshi_SHL64x2, ARM64vecshi_SHL32x4,
427 ARM64vecshi_SHL16x8, ARM64vecshi_SHL8x16,
sewardjecedd982014-08-11 14:02:47 +0000428 /* These narrowing shifts zero out the top half of the destination
429 register. */
sewardja6b61f02014-08-17 18:32:14 +0000430 ARM64vecshi_SQSHRN2SD, ARM64vecshi_SQSHRN4HS, ARM64vecshi_SQSHRN8BH,
431 ARM64vecshi_UQSHRN2SD, ARM64vecshi_UQSHRN4HS, ARM64vecshi_UQSHRN8BH,
432 ARM64vecshi_SQSHRUN2SD, ARM64vecshi_SQSHRUN4HS, ARM64vecshi_SQSHRUN8BH,
433 ARM64vecshi_SQRSHRN2SD, ARM64vecshi_SQRSHRN4HS, ARM64vecshi_SQRSHRN8BH,
434 ARM64vecshi_UQRSHRN2SD, ARM64vecshi_UQRSHRN4HS, ARM64vecshi_UQRSHRN8BH,
435 ARM64vecshi_SQRSHRUN2SD, ARM64vecshi_SQRSHRUN4HS, ARM64vecshi_SQRSHRUN8BH,
sewardja97dddf2014-08-14 22:26:52 +0000436 /* Saturating left shifts, of various flavours. */
sewardja6b61f02014-08-17 18:32:14 +0000437 ARM64vecshi_UQSHL64x2, ARM64vecshi_UQSHL32x4,
438 ARM64vecshi_UQSHL16x8, ARM64vecshi_UQSHL8x16,
439 ARM64vecshi_SQSHL64x2, ARM64vecshi_SQSHL32x4,
440 ARM64vecshi_SQSHL16x8, ARM64vecshi_SQSHL8x16,
441 ARM64vecshi_SQSHLU64x2, ARM64vecshi_SQSHLU32x4,
442 ARM64vecshi_SQSHLU16x8, ARM64vecshi_SQSHLU8x16,
443 ARM64vecshi_INVALID
sewardje520bb32014-02-17 11:00:53 +0000444 }
sewardja6b61f02014-08-17 18:32:14 +0000445 ARM64VecShiftImmOp;
sewardje520bb32014-02-17 11:00:53 +0000446
sewardjbbcf1882014-01-12 12:49:10 +0000447typedef
448 enum {
sewardjfc261d92014-08-24 20:36:14 +0000449 ARM64vecna_XTN=450,
sewardjecedd982014-08-11 14:02:47 +0000450 ARM64vecna_SQXTN,
451 ARM64vecna_UQXTN,
452 ARM64vecna_SQXTUN,
453 ARM64vecna_INVALID
454 }
455 ARM64VecNarrowOp;
456
typedef
   enum {
      /* baseline */
      ARM64in_Arith=1220,
      ARM64in_Cmp,
      ARM64in_Logic,
      ARM64in_Test,
      ARM64in_Shift,
      ARM64in_Unary,
      ARM64in_MovI,        /* int reg-reg move */
      ARM64in_Imm64,
      ARM64in_LdSt64,
      ARM64in_LdSt32,      /* w/ ZX loads */
      ARM64in_LdSt16,      /* w/ ZX loads */
      ARM64in_LdSt8,       /* w/ ZX loads */
      ARM64in_XDirect,     /* direct transfer to GA */
      ARM64in_XIndir,      /* indirect transfer to GA */
      ARM64in_XAssisted,   /* assisted transfer to GA */
      ARM64in_CSel,
      ARM64in_Call,
      ARM64in_AddToSP,     /* move SP by small, signed constant */
      ARM64in_FromSP,      /* move SP to integer register */
      ARM64in_Mul,
      ARM64in_LdrEX,
      ARM64in_StrEX,
      ARM64in_MFence,
      /* ARM64in_V*: scalar ops involving vector registers */
      ARM64in_VLdStS,      /* 32-bit FP load/store, with imm offset */
      ARM64in_VLdStD,      /* 64-bit FP load/store, with imm offset */
      ARM64in_VLdStQ,
      ARM64in_VCvtI2F,
      ARM64in_VCvtF2I,
      ARM64in_VCvtSD,
      ARM64in_VUnaryD,
      ARM64in_VUnaryS,
      ARM64in_VBinD,
      ARM64in_VBinS,
      ARM64in_VCmpD,
      ARM64in_VCmpS,
      ARM64in_VFCSel,
      ARM64in_FPCR,
      ARM64in_FPSR,
      /* ARM64in_V*V: vector ops on vector registers */
      ARM64in_VBinV,
      ARM64in_VModifyV,
      ARM64in_VUnaryV,
      ARM64in_VNarrowV,
      ARM64in_VShiftImmV,
      ARM64in_VExtV,
      ARM64in_VImmQ,
      ARM64in_VDfromX,     /* Move an Xreg to a Dreg */
      ARM64in_VQfromX,     /* Move an Xreg to a Qreg lo64, and zero hi64 */
      ARM64in_VQfromXX,    /* Move 2 Xregs to a Qreg */
      ARM64in_VXfromQ,     /* Move half a Qreg to an Xreg */
      ARM64in_VXfromDorS,  /* Move Dreg or Sreg(ZX) to an Xreg */
      ARM64in_VMov,        /* vector reg-reg move, 16, 8 or 4 bytes */
      /* infrastructure */
      ARM64in_EvCheck,     /* Event check */
      ARM64in_ProfInc      /* 64-bit profile counter increment */
   }
   ARM64InstrTag;

/* Destinations are on the LEFT (first operand) */

typedef
   struct {
      ARM64InstrTag tag;
      union {
         /* --- INTEGER INSTRUCTIONS --- */
         /* 64 bit ADD/SUB reg, reg or uimm12<<{0,12} */
         struct {
            HReg dst;
            HReg argL;
            ARM64RIA* argR;
            Bool isAdd;
         } Arith;
         /* 64 or 32 bit CMP reg, reg or aimm (SUB and set flags) */
         struct {
            HReg argL;
            ARM64RIA* argR;
            Bool is64;
         } Cmp;
         /* 64 bit AND/OR/XOR reg, reg or bitfield-immediate */
         struct {
            HReg dst;
            HReg argL;
            ARM64RIL* argR;
            ARM64LogicOp op;
         } Logic;
         /* 64 bit TST reg, reg or bimm (AND and set flags) */
         struct {
            HReg argL;
            ARM64RIL* argR;
         } Test;
         /* 64 bit SHL/SHR/SAR, 2nd arg is reg or imm */
         struct {
            HReg dst;
            HReg argL;
            ARM64RI6* argR;
            ARM64ShiftOp op;
         } Shift;
         /* NOT/NEG/CLZ, 64 bit only */
         struct {
            HReg dst;
            HReg src;
            ARM64UnaryOp op;
         } Unary;
         /* MOV dst, src -- reg-reg move for integer registers */
         struct {
            HReg dst;
            HReg src;
         } MovI;
         /* Pseudo-insn; make a 64-bit immediate */
         struct {
            HReg dst;
            ULong imm64;
         } Imm64;
         /* 64-bit load or store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt64;
         /* zx-32-to-64-bit load, or 32-bit store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt32;
         /* zx-16-to-64-bit load, or 16-bit store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt16;
         /* zx-8-to-64-bit load, or 8-bit store */
         struct {
            Bool isLoad;
            HReg rD;
            ARM64AMode* amode;
         } LdSt8;
         /* Update the guest PC value, then exit requesting to chain
            to it.  May be conditional.  Urr, use of Addr64 implicitly
            assumes that wordsize(guest) == wordsize(host). */
         struct {
            Addr64 dstGA;       /* next guest address */
            ARM64AMode* amPC;   /* amode in guest state for PC */
            ARM64CondCode cond; /* can be ARM64cc_AL */
            Bool toFastEP;      /* chain to the slow or fast point? */
         } XDirect;
         /* Boring transfer to a guest address not known at JIT time.
            Not chainable.  May be conditional. */
         struct {
            HReg dstGA;
            ARM64AMode* amPC;
            ARM64CondCode cond; /* can be ARM64cc_AL */
         } XIndir;
         /* Assisted transfer to a guest address, most general case.
            Not chainable.  May be conditional. */
         struct {
            HReg dstGA;
            ARM64AMode* amPC;
            ARM64CondCode cond; /* can be ARM64cc_AL */
            IRJumpKind jk;
         } XAssisted;
         /* CSEL: dst = if cond then argL else argR.  cond may be anything. */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64CondCode cond;
         } CSel;
         /* Pseudo-insn.  Call target (an absolute address), on given
            condition (which could be ARM64cc_AL). */
         struct {
            RetLoc rloc;  /* where the return value will be */
            HWord target;
            ARM64CondCode cond;
            Int nArgRegs; /* # regs carrying args: 0 .. 8 */
         } Call;
         /* move SP by small, signed constant */
         struct {
            Int simm; /* needs to be 0 % 16 and in the range -4095
                         .. 4095 inclusive */
         } AddToSP;
         /* move SP to integer register */
         struct {
            HReg dst;
         } FromSP;
         /* Integer multiply, with 3 variants:
              (PLAIN) lo64(64 * 64)
              (ZX)    hi64(64 *u 64)
              (SX)    hi64(64 *s 64)
         */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64MulOp op;
         } Mul;
         /* LDXR{,H,B} x2, [x4] */
         struct {
            Int szB; /* 1, 2, 4 or 8 */
         } LdrEX;
         /* STXR{,H,B} w0, x2, [x4] */
         struct {
            Int szB; /* 1, 2, 4 or 8 */
         } StrEX;
         /* Mem fence.  An insn which fences all loads and stores as
            much as possible before continuing.  On ARM64 we emit the
            sequence "dsb sy ; dmb sy ; isb sy", which is probably
            total nuclear overkill, but better safe than sorry. */
         struct {
         } MFence;
         /* --- INSTRUCTIONS INVOLVING VECTOR REGISTERS --- */
         /* 32-bit Fp load/store */
         struct {
            Bool isLoad;
            HReg sD;
            HReg rN;
            UInt uimm12;  /* 0 .. 16380 inclusive, 0 % 4 */
         } VLdStS;
         /* 64-bit Fp load/store */
         struct {
            Bool isLoad;
            HReg dD;
            HReg rN;
            UInt uimm12;  /* 0 .. 32760 inclusive, 0 % 8 */
         } VLdStD;
         /* 128-bit Vector load/store. */
         struct {
            Bool isLoad;
            HReg rQ; // data
            HReg rN; // address
         } VLdStQ;
         /* Scalar conversion of int to float. */
         struct {
            ARM64CvtOp how;
            HReg rD; // dst, a D or S register
            HReg rS; // src, a W or X register
         } VCvtI2F;
         /* Scalar conversion of float to int, w/ specified RM. */
         struct {
            ARM64CvtOp how;
            HReg rD;     // dst, a W or X register
            HReg rS;     // src, a D or S register
            UChar armRM; // ARM encoded RM:
                         // 00=nearest, 01=+inf, 10=-inf, 11=zero
         } VCvtF2I;
         /* Convert between 32-bit and 64-bit FP values (both
            ways). (FCVT) */
         struct {
            Bool sToD; /* True: F32->F64.  False: F64->F32 */
            HReg dst;
            HReg src;
         } VCvtSD;
         /* 64-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg dst;
            HReg src;
         } VUnaryD;
         /* 32-bit FP unary */
         struct {
            ARM64FpUnaryOp op;
            HReg dst;
            HReg src;
         } VUnaryS;
         /* 64-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinD;
         /* 32-bit FP binary arithmetic */
         struct {
            ARM64FpBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinS;
         /* 64-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpD;
         /* 32-bit FP compare */
         struct {
            HReg argL;
            HReg argR;
         } VCmpS;
         /* 32- or 64-bit FP conditional select */
         struct {
            HReg dst;
            HReg argL;
            HReg argR;
            ARM64CondCode cond;
            Bool isD;
         }
         VFCSel;
         /* Move a 32-bit value to/from the FPCR */
         struct {
            Bool toFPCR;
            HReg iReg;
         } FPCR;
         /* Move a 32-bit value to/from the FPSR */
         struct {
            Bool toFPSR;
            HReg iReg;
         } FPSR;
         /* binary vector operation on vector registers */
         struct {
            ARM64VecBinOp op;
            HReg dst;
            HReg argL;
            HReg argR;
         } VBinV;
         /* binary vector operation on vector registers.
            Dst reg is also a src. */
         struct {
            ARM64VecModifyOp op;
            HReg mod;
            HReg arg;
         } VModifyV;
         /* unary vector operation on vector registers */
         struct {
            ARM64VecUnaryOp op;
            HReg dst;
            HReg arg;
         } VUnaryV;
         /* vector narrowing, Q -> Q.  Result goes in the bottom half
            of dst and the top half is zeroed out.  Iow one of the
            XTN family. */
         struct {
            ARM64VecNarrowOp op;
            UInt dszBlg2; // 0: 16to8_x8  1: 32to16_x4  2: 64to32_x2
            HReg dst;     // Q reg
            HReg src;     // Q reg
         } VNarrowV;
         /* Vector shift by immediate.  For left shifts, |amt| must be
            >= 0 and < implied lane size of |op|.  For right shifts,
            |amt| must be > 0 and <= implied lane size of |op|.  Shifts
            beyond these ranges are not allowed. */
         struct {
            ARM64VecShiftImmOp op;
            HReg dst;
            HReg src;
            UInt amt;
         } VShiftImmV;
         struct {
            HReg dst;
            HReg srcLo;
            HReg srcHi;
            UInt amtB;
         } VExtV;
         struct {
            HReg rQ;
            UShort imm; /* Same 1-bit-per-byte encoding as IR */
         } VImmQ;
         struct {
            HReg rD;
            HReg rX;
         } VDfromX;
         struct {
            HReg rQ;
            HReg rXlo;
         } VQfromX;
         struct {
            HReg rQ;
            HReg rXhi;
            HReg rXlo;
         } VQfromXX;
         struct {
            HReg rX;
            HReg rQ;
            UInt laneNo; /* either 0 or 1 */
         } VXfromQ;
         struct {
            HReg rX;
            HReg rDorS;
            Bool fromD;
         } VXfromDorS;
         /* MOV dst, src -- reg-reg move for vector registers */
         struct {
            UInt szB; // 16=mov qD,qS;  8=mov dD,dS;  4=mov sD,sS
            HReg dst;
            HReg src;
         } VMov;
         struct {
            ARM64AMode* amCounter;
            ARM64AMode* amFailAddr;
         } EvCheck;
         struct {
            /* No fields.  The address of the counter to inc is
               installed later, post-translation, by patching it in,
               as it is not known at translation time. */
         } ProfInc;
      } ARM64in;
   }
   ARM64Instr;


extern ARM64Instr* ARM64Instr_Arith ( HReg, HReg, ARM64RIA*, Bool isAdd );
extern ARM64Instr* ARM64Instr_Cmp ( HReg, ARM64RIA*, Bool is64 );
extern ARM64Instr* ARM64Instr_Logic ( HReg, HReg, ARM64RIL*, ARM64LogicOp );
extern ARM64Instr* ARM64Instr_Test ( HReg, ARM64RIL* );
extern ARM64Instr* ARM64Instr_Shift ( HReg, HReg, ARM64RI6*, ARM64ShiftOp );
extern ARM64Instr* ARM64Instr_Unary ( HReg, HReg, ARM64UnaryOp );
extern ARM64Instr* ARM64Instr_MovI ( HReg, HReg );
extern ARM64Instr* ARM64Instr_Imm64 ( HReg, ULong );
extern ARM64Instr* ARM64Instr_LdSt64 ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt32 ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt16 ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_LdSt8 ( Bool isLoad, HReg, ARM64AMode* );
extern ARM64Instr* ARM64Instr_XDirect ( Addr64 dstGA, ARM64AMode* amPC,
                                        ARM64CondCode cond, Bool toFastEP );
extern ARM64Instr* ARM64Instr_XIndir ( HReg dstGA, ARM64AMode* amPC,
                                       ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_XAssisted ( HReg dstGA, ARM64AMode* amPC,
                                          ARM64CondCode cond, IRJumpKind jk );
extern ARM64Instr* ARM64Instr_CSel ( HReg dst, HReg argL, HReg argR,
                                     ARM64CondCode cond );
extern ARM64Instr* ARM64Instr_Call ( ARM64CondCode, HWord, Int nArgRegs,
                                     RetLoc rloc );
extern ARM64Instr* ARM64Instr_AddToSP ( Int simm );
extern ARM64Instr* ARM64Instr_FromSP ( HReg dst );
extern ARM64Instr* ARM64Instr_Mul ( HReg dst, HReg argL, HReg argR,
                                    ARM64MulOp op );
extern ARM64Instr* ARM64Instr_LdrEX ( Int szB );
extern ARM64Instr* ARM64Instr_StrEX ( Int szB );
extern ARM64Instr* ARM64Instr_MFence ( void );
extern ARM64Instr* ARM64Instr_VLdStS ( Bool isLoad, HReg sD, HReg rN,
                                       UInt uimm12 /* 0 .. 16380, 0 % 4 */ );
extern ARM64Instr* ARM64Instr_VLdStD ( Bool isLoad, HReg dD, HReg rN,
                                       UInt uimm12 /* 0 .. 32760, 0 % 8 */ );
extern ARM64Instr* ARM64Instr_VLdStQ ( Bool isLoad, HReg rQ, HReg rN );
extern ARM64Instr* ARM64Instr_VCvtI2F ( ARM64CvtOp how, HReg rD, HReg rS );
extern ARM64Instr* ARM64Instr_VCvtF2I ( ARM64CvtOp how, HReg rD, HReg rS,
                                        UChar armRM );
extern ARM64Instr* ARM64Instr_VCvtSD ( Bool sToD, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryD ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VUnaryS ( ARM64FpUnaryOp op, HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VBinD ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VBinS ( ARM64FpBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VCmpD ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VCmpS ( HReg argL, HReg argR );
extern ARM64Instr* ARM64Instr_VFCSel ( HReg dst, HReg argL, HReg argR,
                                       ARM64CondCode cond, Bool isD );
extern ARM64Instr* ARM64Instr_FPCR ( Bool toFPCR, HReg iReg );
extern ARM64Instr* ARM64Instr_FPSR ( Bool toFPSR, HReg iReg );
extern ARM64Instr* ARM64Instr_VBinV ( ARM64VecBinOp op, HReg, HReg, HReg );
extern ARM64Instr* ARM64Instr_VModifyV ( ARM64VecModifyOp, HReg, HReg );
extern ARM64Instr* ARM64Instr_VUnaryV ( ARM64VecUnaryOp op, HReg, HReg );
extern ARM64Instr* ARM64Instr_VNarrowV ( ARM64VecNarrowOp op, UInt dszBlg2,
                                         HReg dst, HReg src );
extern ARM64Instr* ARM64Instr_VShiftImmV ( ARM64VecShiftImmOp op,
                                           HReg dst, HReg src, UInt amt );
extern ARM64Instr* ARM64Instr_VExtV ( HReg dst,
                                      HReg srcLo, HReg srcHi, UInt amtB );
extern ARM64Instr* ARM64Instr_VImmQ ( HReg, UShort );
extern ARM64Instr* ARM64Instr_VDfromX ( HReg rD, HReg rX );
extern ARM64Instr* ARM64Instr_VQfromX ( HReg rQ, HReg rXlo );
extern ARM64Instr* ARM64Instr_VQfromXX( HReg rQ, HReg rXhi, HReg rXlo );
extern ARM64Instr* ARM64Instr_VXfromQ ( HReg rX, HReg rQ, UInt laneNo );
extern ARM64Instr* ARM64Instr_VXfromDorS ( HReg rX, HReg rDorS, Bool fromD );
extern ARM64Instr* ARM64Instr_VMov ( UInt szB, HReg dst, HReg src );

extern ARM64Instr* ARM64Instr_EvCheck ( ARM64AMode* amCounter,
                                        ARM64AMode* amFailAddr );
extern ARM64Instr* ARM64Instr_ProfInc ( void );
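
/* Example (purely illustrative; not part of the interface itself):
   the instruction selector builds ARM64Instr values like this, here
   for a 64-bit "add x9, x9, #16":

      ARM64Instr* i = ARM64Instr_Arith( hregARM64_X9(), hregARM64_X9(),
                                        ARM64RIA_I12( 16, 0 ),
                                        True/*isAdd*/ );

   which is then appended to the HInstrArray under construction. */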

extern void ppARM64Instr ( const ARM64Instr* );


/* Some functions that insulate the register allocator from details
   of the underlying instruction set. */
extern void getRegUsage_ARM64Instr ( HRegUsage*, const ARM64Instr*, Bool );
extern void mapRegs_ARM64Instr     ( HRegRemap*, ARM64Instr*, Bool );
extern Bool isMove_ARM64Instr      ( const ARM64Instr*, HReg*, HReg* );
extern Int  emit_ARM64Instr        ( /*MB_MOD*/Bool* is_profInc,
                                     UChar* buf, Int nbuf, const ARM64Instr* i,
                                     Bool mode64,
                                     VexEndness endness_host,
                                     const void* disp_cp_chain_me_to_slowEP,
                                     const void* disp_cp_chain_me_to_fastEP,
                                     const void* disp_cp_xindir,
                                     const void* disp_cp_xassisted );

extern void genSpill_ARM64  ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );
extern void genReload_ARM64 ( /*OUT*/HInstr** i1, /*OUT*/HInstr** i2,
                              HReg rreg, Int offset, Bool );

extern void getAllocableRegs_ARM64 ( Int*, HReg** );
extern HInstrArray* iselSB_ARM64 ( const IRSB*,
                                   VexArch,
                                   const VexArchInfo*,
                                   const VexAbiInfo*,
                                   Int offs_Host_EvC_Counter,
                                   Int offs_Host_EvC_FailAddr,
                                   Bool chainingAllowed,
                                   Bool addProfInc,
                                   Addr max_ga );

/* How big is an event check?  This is kind of a kludge because it
   depends on the offsets of host_EvC_FAILADDR and
   host_EvC_COUNTER. */
extern Int evCheckSzB_ARM64 ( VexEndness endness_host );

/* Perform a chaining and unchaining of an XDirect jump. */
extern VexInvalRange chainXDirect_ARM64 ( VexEndness endness_host,
                                          void* place_to_chain,
                                          const void* disp_cp_chain_me_EXPECTED,
                                          const void* place_to_jump_to );

extern VexInvalRange unchainXDirect_ARM64 ( VexEndness endness_host,
                                            void* place_to_unchain,
                                            const void* place_to_jump_to_EXPECTED,
                                            const void* disp_cp_chain_me );

/* Patch the counter location into an existing ProfInc point. */
extern VexInvalRange patchProfInc_ARM64 ( VexEndness endness_host,
                                          void* place_to_patch,
                                          const ULong* location_of_counter );


#endif /* ndef __VEX_HOST_ARM64_DEFS_H */

/*---------------------------------------------------------------*/
/*--- end                                     host_arm64_defs.h ---*/
/*---------------------------------------------------------------*/