Major register allocation rework - stage 1.
Direct usage of registers abstracted out.
Live values tracked locally; redundant loads and stores suppressed (see the
sketch following these notes).
Addressing of registers and register pairs unified with a single "location" mechanism.
Register types inferred using existing dataflow analysis pass.
Interim (i.e., hack) mechanism for storing register liveness info. Rewrite TBD.
Stubbed-out code for linear scan allocation (for loop and long traces).
Moved optimistic lock check for monitor-enter/exit inline for Thumb2.
Minor restructuring, renaming, and general cleanup of codegen.
Renaming of enums to follow the coding convention.
Formatting fixes necessitated by the enum renaming.
Rewrite of RallocUtil.c and addition of linear scan to come in stage 2.
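
A rough sketch of the live-value tracking described above: a load of a Dalvik
value claims a temp and records which name it holds, so a later request for
the same name can reuse the register instead of reloading it from the frame.
This is only an illustrative, self-contained sketch, not the compiler's
implementation (that work lands in the codegen and RallocUtil changes); the
helpers findLiveTemp() and loadDalvikValue() are hypothetical names, and the
pool below is a stripped-down mirror of the RegisterInfo/RegisterPool
structures added in the header diff that follows.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Simplified mirror of the RegisterInfo record added in ArmLIR.h */
    typedef struct {
        int reg;        /* native register number */
        bool inUse;     /* currently allocated? */
        bool live;      /* holds a named Dalvik/SSA value? */
        bool dirty;     /* value modified since it was last stored? */
        int sReg;       /* name of the live value (or -1) */
    } RegisterInfo;

    /* Toy pool of core temps; the real pool also tracks FP regs and pairs. */
    static RegisterInfo coreTemps[] = {
        { 4, false, false, false, -1 },
        { 5, false, false, false, -1 },
        { 7, false, false, false, -1 },
    };
    static const int numCoreTemps = sizeof(coreTemps) / sizeof(coreTemps[0]);

    /* Hypothetical helper: return a temp already holding sReg, if any. */
    static RegisterInfo *findLiveTemp(int sReg)
    {
        for (int i = 0; i < numCoreTemps; i++) {
            if (coreTemps[i].live && coreTemps[i].sReg == sReg)
                return &coreTemps[i];
        }
        return NULL;
    }

    /*
     * Hypothetical helper: materialize Dalvik value sReg in a native
     * register.  If a live copy already exists, the frame load is
     * suppressed; otherwise a free temp is claimed and a load would be
     * emitted (rFP below stands for the Dalvik frame pointer).
     */
    static RegisterInfo *loadDalvikValue(int sReg)
    {
        RegisterInfo *info = findLiveTemp(sReg);
        if (info != NULL) {
            printf("reuse r%d for v%d (load suppressed)\n", info->reg, sReg);
            return info;
        }
        for (int i = 0; i < numCoreTemps; i++) {
            if (!coreTemps[i].inUse && !coreTemps[i].live) {
                info = &coreTemps[i];
                info->inUse = true;
                info->live = true;
                info->dirty = false;
                info->sReg = sReg;
                printf("emit ldr r%d, [rFP, #4*%d]\n", info->reg, sReg);
                return info;
            }
        }
        return NULL;  /* a real allocator would spill an existing value here */
    }

    int main(void)
    {
        loadDalvikValue(3);   /* first use: load from the Dalvik frame        */
        loadDalvikValue(3);   /* second use: live copy found, load suppressed */
        return 0;
    }

Store suppression is the symmetric case in this sketch: a write would only
mark the register dirty, and the value is flushed back to the Dalvik frame
when the register is reclaimed or the trace exits.
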
diff --git a/vm/compiler/codegen/arm/ArmLIR.h b/vm/compiler/codegen/arm/ArmLIR.h
index 4e97499..bff6cc8 100644
--- a/vm/compiler/codegen/arm/ArmLIR.h
+++ b/vm/compiler/codegen/arm/ArmLIR.h
@@ -71,6 +71,14 @@
#define LOWREG(x) ((x & 0x7) == x)
#define DOUBLEREG(x) ((x & FP_DOUBLE) == FP_DOUBLE)
#define SINGLEREG(x) (FPREG(x) && !DOUBLEREG(x))
+/*
+ * Note: the low register of a floating point pair is sufficient to
+ * create the name of a double, but both names should be passed so that
+ * asserts can verify the pair is consecutive if significant rework is
+ * done in this area.  Passing both also reminds the calling code that
+ * reg locations always describe doubles as a pair of singles.
+ */
+#define S2D(x,y) ((x) | FP_DOUBLE)
/* Mask to strip off fp flags */
#define FP_REG_MASK (FP_REG_OFFSET-1)
/* non-existent Dalvik register */
@@ -78,6 +86,43 @@
/* non-existant physical register */
#define rNone (-1)
+/* RegisterLocation templates for return values (r0, or r0/r1) */
+#define LOC_C_RETURN {kLocPhysReg, 0, 0, r0, 0, -1}
+#define LOC_C_RETURN_WIDE {kLocPhysReg, 1, 0, r0, r1, -1}
+/* RegisterLocation templates for interpState->retVal; */
+#define LOC_DALVIK_RETURN_VAL {kLocRetval, 0, 0, 0, 0, -1}
+#define LOC_DALVIK_RETURN_VAL_WIDE {kLocRetval, 1, 0, 0, 0, -1}
+
+ /*
+ * Data structure tracking the mapping between a Dalvik register (pair) and a
+ * native register (pair). The idea is to reuse the previously loaded value
+ * if possible, otherwise to keep the value in a native register as long as
+ * possible.
+ */
+typedef struct RegisterInfo {
+ int reg; // Reg number
+ bool inUse; // Has it been allocated?
+ bool pair; // Part of a register pair?
+ int partner; // If pair, other reg of pair
+ bool live; // Is there an associated SSA name?
+ bool dirty; // If live, is it dirty?
+ int sReg; // Name of live value
+ struct LIR *defStart; // Starting inst in last def sequence
+ struct LIR *defEnd; // Ending inst in last def sequence
+} RegisterInfo;
+
+typedef struct RegisterPool {
+ BitVector *nullCheckedRegs; // Track which registers have been null-checked
+ int numCoreTemps;
+ RegisterInfo *coreTemps;
+ int numFPTemps;
+ RegisterInfo *FPTemps;
+ int numCoreRegs;
+ RegisterInfo *coreRegs;
+ int numFPRegs;
+ RegisterInfo *FPRegs;
+} RegisterPool;
+
typedef enum ResourceEncodingPos {
kGPReg0 = 0,
kRegSP = 13,
@@ -103,49 +148,49 @@
#define DECODE_ALIAS_INFO_WIDE(X) ((X & 0x80000000) ? 1 : 0)
typedef enum OpSize {
- WORD,
- LONG,
- SINGLE,
- DOUBLE,
- UNSIGNED_HALF,
- SIGNED_HALF,
- UNSIGNED_BYTE,
- SIGNED_BYTE,
+ kWord,
+ kLong,
+ kSingle,
+ kDouble,
+ kUnsignedHalf,
+ kSignedHalf,
+ kUnsignedByte,
+ kSignedByte,
} OpSize;
typedef enum OpKind {
- OP_MOV,
- OP_MVN,
- OP_CMP,
- OP_LSL,
- OP_LSR,
- OP_ASR,
- OP_ROR,
- OP_NOT,
- OP_AND,
- OP_OR,
- OP_XOR,
- OP_NEG,
- OP_ADD,
- OP_ADC,
- OP_SUB,
- OP_SBC,
- OP_RSUB,
- OP_MUL,
- OP_DIV,
- OP_REM,
- OP_BIC,
- OP_CMN,
- OP_TST,
- OP_BKPT,
- OP_BLX,
- OP_PUSH,
- OP_POP,
- OP_2CHAR,
- OP_2SHORT,
- OP_2BYTE,
- OP_COND_BR,
- OP_UNCOND_BR,
+ kOpMov,
+ kOpMvn,
+ kOpCmp,
+ kOpLsl,
+ kOpLsr,
+ kOpAsr,
+ kOpRor,
+ kOpNot,
+ kOpAnd,
+ kOpOr,
+ kOpXor,
+ kOpNeg,
+ kOpAdd,
+ kOpAdc,
+ kOpSub,
+ kOpSbc,
+ kOpRsub,
+ kOpMul,
+ kOpDiv,
+ kOpRem,
+ kOpBic,
+ kOpCmn,
+ kOpTst,
+ kOpBkpt,
+ kOpBlx,
+ kOpPush,
+ kOpPop,
+ kOp2Char,
+ kOp2Short,
+ kOp2Byte,
+ kOpCondBr,
+ kOpUncondBr,
} OpKind;
typedef enum NativeRegisterPool {
@@ -215,24 +260,32 @@
dr15 = fr30 + FP_DOUBLE,
} NativeRegisterPool;
+/* Shift encodings */
+typedef enum ArmShiftEncodings {
+ kArmLsl = 0x0,
+ kArmLsr = 0x1,
+ kArmAsr = 0x2,
+ kArmRor = 0x3
+} ArmShiftEncodings;
+
/* Thumb condition encodings */
typedef enum ArmConditionCode {
- ARM_COND_EQ = 0x0, /* 0000 */
- ARM_COND_NE = 0x1, /* 0001 */
- ARM_COND_CS = 0x2, /* 0010 */
- ARM_COND_CC = 0x3, /* 0011 */
- ARM_COND_MI = 0x4, /* 0100 */
- ARM_COND_PL = 0x5, /* 0101 */
- ARM_COND_VS = 0x6, /* 0110 */
- ARM_COND_VC = 0x7, /* 0111 */
- ARM_COND_HI = 0x8, /* 1000 */
- ARM_COND_LS = 0x9, /* 1001 */
- ARM_COND_GE = 0xa, /* 1010 */
- ARM_COND_LT = 0xb, /* 1011 */
- ARM_COND_GT = 0xc, /* 1100 */
- ARM_COND_LE = 0xd, /* 1101 */
- ARM_COND_AL = 0xe, /* 1110 */
- ARM_COND_NV = 0xf, /* 1111 */
+ kArmCondEq = 0x0, /* 0000 */
+ kArmCondNe = 0x1, /* 0001 */
+ kArmCondCs = 0x2, /* 0010 */
+ kArmCondCc = 0x3, /* 0011 */
+ kArmCondMi = 0x4, /* 0100 */
+ kArmCondPl = 0x5, /* 0101 */
+ kArmCondVs = 0x6, /* 0110 */
+ kArmCondVc = 0x7, /* 0111 */
+ kArmCondHi = 0x8, /* 1000 */
+ kArmCondLs = 0x9, /* 1001 */
+ kArmCondGe = 0xa, /* 1010 */
+ kArmCondLt = 0xb, /* 1011 */
+ kArmCondGt = 0xc, /* 1100 */
+ kArmCondLe = 0xd, /* 1101 */
+ kArmCondAl = 0xe, /* 1110 */
+ kArmCondNv = 0xf, /* 1111 */
} ArmConditionCode;
#define isPseudoOpCode(opCode) ((int)(opCode) < 0)
@@ -243,290 +296,311 @@
* Assemble.c.
*/
typedef enum ArmOpCode {
- ARM_PSEUDO_BARRIER = -17,
- ARM_PSEUDO_EXTENDED_MIR = -16,
- ARM_PSEUDO_SSA_REP = -15,
- ARM_PSEUDO_ENTRY_BLOCK = -14,
- ARM_PSEUDO_EXIT_BLOCK = -13,
- ARM_PSEUDO_TARGET_LABEL = -12,
- ARM_PSEUDO_CHAINING_CELL_BACKWARD_BRANCH = -11,
- ARM_PSEUDO_CHAINING_CELL_HOT = -10,
- ARM_PSEUDO_CHAINING_CELL_INVOKE_PREDICTED = -9,
- ARM_PSEUDO_CHAINING_CELL_INVOKE_SINGLETON = -8,
- ARM_PSEUDO_CHAINING_CELL_NORMAL = -7,
- ARM_PSEUDO_DALVIK_BYTECODE_BOUNDARY = -6,
- ARM_PSEUDO_ALIGN4 = -5,
- ARM_PSEUDO_PC_RECONSTRUCTION_CELL = -4,
- ARM_PSEUDO_PC_RECONSTRUCTION_BLOCK_LABEL = -3,
- ARM_PSEUDO_EH_BLOCK_LABEL = -2,
- ARM_PSEUDO_NORMAL_BLOCK_LABEL = -1,
+ kArmPseudoBarrier = -17,
+ kArmPseudoExtended = -16,
+ kArmPseudoSSARep = -15,
+ ARM_PSEUDO_kEntryBlock = -14,
+ ARM_PSEUDO_kExitBlock = -13,
+ kArmPseudoTargetLabel = -12,
+ ARM_PSEUDO_kChainingCellBackwardBranch = -11,
+ ARM_PSEUDO_kChainingCellHot = -10,
+ ARM_PSEUDO_kChainingCellInvokePredicted = -9,
+ ARM_PSEUDO_kChainingCellInvokeSingleton = -8,
+ ARM_PSEUDO_kChainingCellNormal = -7,
+ ARM_PSEUDO_kDalvikByteCode_BOUNDARY = -6,
+ kArmPseudoPseudoAlign4 = -5,
+ ARM_PSEUDO_kPCReconstruction_CELL = -4,
+ ARM_PSEUDO_kPCReconstruction_BLOCK_LABEL = -3,
+ kArmPseudoEHBlockLabel = -2,
+ kArmPseudoNormalBlockLabel = -1,
/************************************************************************/
- ARM_16BIT_DATA, /* DATA [0] rd[15..0] */
- THUMB_ADC_RR, /* adc [0100000101] rm[5..3] rd[2..0] */
- THUMB_ADD_RRI3, /* add(1) [0001110] imm_3[8..6] rn[5..3] rd[2..0]*/
- THUMB_ADD_RI8, /* add(2) [00110] rd[10..8] imm_8[7..0] */
- THUMB_ADD_RRR, /* add(3) [0001100] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_ADD_RR_LH, /* add(4) [01000100] H12[01] rm[5..3] rd[2..0] */
- THUMB_ADD_RR_HL, /* add(4) [01001000] H12[10] rm[5..3] rd[2..0] */
- THUMB_ADD_RR_HH, /* add(4) [01001100] H12[11] rm[5..3] rd[2..0] */
- THUMB_ADD_PC_REL, /* add(5) [10100] rd[10..8] imm_8[7..0] */
- THUMB_ADD_SP_REL, /* add(6) [10101] rd[10..8] imm_8[7..0] */
- THUMB_ADD_SPI7, /* add(7) [101100000] imm_7[6..0] */
- THUMB_AND_RR, /* and [0100000000] rm[5..3] rd[2..0] */
- THUMB_ASR_RRI5, /* asr(1) [00010] imm_5[10..6] rm[5..3] rd[2..0] */
- THUMB_ASR_RR, /* asr(2) [0100000100] rs[5..3] rd[2..0] */
- THUMB_B_COND, /* b(1) [1101] cond[11..8] offset_8[7..0] */
- THUMB_B_UNCOND, /* b(2) [11100] offset_11[10..0] */
- THUMB_BIC_RR, /* bic [0100001110] rm[5..3] rd[2..0] */
- THUMB_BKPT, /* bkpt [10111110] imm_8[7..0] */
- THUMB_BLX_1, /* blx(1) [111] H[10] offset_11[10..0] */
- THUMB_BLX_2, /* blx(1) [111] H[01] offset_11[10..0] */
- THUMB_BL_1, /* blx(1) [111] H[10] offset_11[10..0] */
- THUMB_BL_2, /* blx(1) [111] H[11] offset_11[10..0] */
- THUMB_BLX_R, /* blx(2) [010001111] rm[6..3] [000] */
- THUMB_BX, /* bx [010001110] H2[6..6] rm[5..3] SBZ[000] */
- THUMB_CMN_RR, /* cmn [0100001011] rm[5..3] rd[2..0] */
- THUMB_CMP_RI8, /* cmp(1) [00101] rn[10..8] imm_8[7..0] */
- THUMB_CMP_RR, /* cmp(2) [0100001010] rm[5..3] rd[2..0] */
- THUMB_CMP_LH, /* cmp(3) [01000101] H12[01] rm[5..3] rd[2..0] */
- THUMB_CMP_HL, /* cmp(3) [01000110] H12[10] rm[5..3] rd[2..0] */
- THUMB_CMP_HH, /* cmp(3) [01000111] H12[11] rm[5..3] rd[2..0] */
- THUMB_EOR_RR, /* eor [0100000001] rm[5..3] rd[2..0] */
- THUMB_LDMIA, /* ldmia [11001] rn[10..8] reglist [7..0] */
- THUMB_LDR_RRI5, /* ldr(1) [01101] imm_5[10..6] rn[5..3] rd[2..0] */
- THUMB_LDR_RRR, /* ldr(2) [0101100] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_LDR_PC_REL, /* ldr(3) [01001] rd[10..8] imm_8[7..0] */
- THUMB_LDR_SP_REL, /* ldr(4) [10011] rd[10..8] imm_8[7..0] */
- THUMB_LDRB_RRI5, /* ldrb(1) [01111] imm_5[10..6] rn[5..3] rd[2..0] */
- THUMB_LDRB_RRR, /* ldrb(2) [0101110] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_LDRH_RRI5, /* ldrh(1) [10001] imm_5[10..6] rn[5..3] rd[2..0] */
- THUMB_LDRH_RRR, /* ldrh(2) [0101101] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_LDRSB_RRR, /* ldrsb [0101011] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_LDRSH_RRR, /* ldrsh [0101111] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_LSL_RRI5, /* lsl(1) [00000] imm_5[10..6] rm[5..3] rd[2..0] */
- THUMB_LSL_RR, /* lsl(2) [0100000010] rs[5..3] rd[2..0] */
- THUMB_LSR_RRI5, /* lsr(1) [00001] imm_5[10..6] rm[5..3] rd[2..0] */
- THUMB_LSR_RR, /* lsr(2) [0100000011] rs[5..3] rd[2..0] */
- THUMB_MOV_IMM, /* mov(1) [00100] rd[10..8] imm_8[7..0] */
- THUMB_MOV_RR, /* mov(2) [0001110000] rn[5..3] rd[2..0] */
- THUMB_MOV_RR_H2H, /* mov(3) [01000111] H12[11] rm[5..3] rd[2..0] */
- THUMB_MOV_RR_H2L, /* mov(3) [01000110] H12[01] rm[5..3] rd[2..0] */
- THUMB_MOV_RR_L2H, /* mov(3) [01000101] H12[10] rm[5..3] rd[2..0] */
- THUMB_MUL, /* mul [0100001101] rm[5..3] rd[2..0] */
- THUMB_MVN, /* mvn [0100001111] rm[5..3] rd[2..0] */
- THUMB_NEG, /* neg [0100001001] rm[5..3] rd[2..0] */
- THUMB_ORR, /* orr [0100001100] rm[5..3] rd[2..0] */
- THUMB_POP, /* pop [1011110] r[8..8] rl[7..0] */
- THUMB_PUSH, /* push [1011010] r[8..8] rl[7..0] */
- THUMB_ROR_RR, /* ror [0100000111] rs[5..3] rd[2..0] */
- THUMB_SBC, /* sbc [0100000110] rm[5..3] rd[2..0] */
- THUMB_STMIA, /* stmia [11000] rn[10..8] reglist [7.. 0] */
- THUMB_STR_RRI5, /* str(1) [01100] imm_5[10..6] rn[5..3] rd[2..0] */
- THUMB_STR_RRR, /* str(2) [0101000] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_STR_SP_REL, /* str(3) [10010] rd[10..8] imm_8[7..0] */
- THUMB_STRB_RRI5, /* strb(1) [01110] imm_5[10..6] rn[5..3] rd[2..0] */
- THUMB_STRB_RRR, /* strb(2) [0101010] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_STRH_RRI5, /* strh(1) [10000] imm_5[10..6] rn[5..3] rd[2..0] */
- THUMB_STRH_RRR, /* strh(2) [0101001] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_SUB_RRI3, /* sub(1) [0001111] imm_3[8..6] rn[5..3] rd[2..0]*/
- THUMB_SUB_RI8, /* sub(2) [00111] rd[10..8] imm_8[7..0] */
- THUMB_SUB_RRR, /* sub(3) [0001101] rm[8..6] rn[5..3] rd[2..0] */
- THUMB_SUB_SPI7, /* sub(4) [101100001] imm_7[6..0] */
- THUMB_SWI, /* swi [11011111] imm_8[7..0] */
- THUMB_TST, /* tst [0100001000] rm[5..3] rn[2..0] */
- THUMB2_VLDRS, /* vldr low sx [111011011001] rn[19..16] rd[15-12]
- [1010] imm_8[7..0] */
- THUMB2_VLDRD, /* vldr low dx [111011011001] rn[19..16] rd[15-12]
- [1011] imm_8[7..0] */
- THUMB2_VMULS, /* vmul vd, vn, vm [111011100010] rn[19..16]
- rd[15-12] [10100000] rm[3..0] */
- THUMB2_VMULD, /* vmul vd, vn, vm [111011100010] rn[19..16]
- rd[15-12] [10110000] rm[3..0] */
- THUMB2_VSTRS, /* vstr low sx [111011011000] rn[19..16] rd[15-12]
- [1010] imm_8[7..0] */
- THUMB2_VSTRD, /* vstr low dx [111011011000] rn[19..16] rd[15-12]
- [1011] imm_8[7..0] */
- THUMB2_VSUBS, /* vsub vd, vn, vm [111011100011] rn[19..16]
- rd[15-12] [10100040] rm[3..0] */
- THUMB2_VSUBD, /* vsub vd, vn, vm [111011100011] rn[19..16]
- rd[15-12] [10110040] rm[3..0] */
- THUMB2_VADDS, /* vadd vd, vn, vm [111011100011] rn[19..16]
- rd[15-12] [10100000] rm[3..0] */
- THUMB2_VADDD, /* vadd vd, vn, vm [111011100011] rn[19..16]
- rd[15-12] [10110000] rm[3..0] */
- THUMB2_VDIVS, /* vdiv vd, vn, vm [111011101000] rn[19..16]
- rd[15-12] [10100000] rm[3..0] */
- THUMB2_VDIVD, /* vdiv vd, vn, vm [111011101000] rn[19..16]
- rd[15-12] [10110000] rm[3..0] */
- THUMB2_VCVTIF, /* vcvt.F32 vd, vm [1110111010111000] vd[15..12]
- [10101100] vm[3..0] */
- THUMB2_VCVTID, /* vcvt.F64 vd, vm [1110111010111000] vd[15..12]
+ kArm16BitData, /* DATA [0] rd[15..0] */
+ kThumbAdcRR, /* adc [0100000101] rm[5..3] rd[2..0] */
+ kThumbAddRRI3, /* add(1) [0001110] imm_3[8..6] rn[5..3] rd[2..0]*/
+ kThumbAddRI8, /* add(2) [00110] rd[10..8] imm_8[7..0] */
+ kThumbAddRRR, /* add(3) [0001100] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbAddRRLH, /* add(4) [01000100] H12[01] rm[5..3] rd[2..0] */
+ kThumbAddRRHL, /* add(4) [01001000] H12[10] rm[5..3] rd[2..0] */
+ kThumbAddRRHH, /* add(4) [01001100] H12[11] rm[5..3] rd[2..0] */
+ kThumbAddPcRel, /* add(5) [10100] rd[10..8] imm_8[7..0] */
+ kThumbAddSpRel, /* add(6) [10101] rd[10..8] imm_8[7..0] */
+ kThumbAddSpI7, /* add(7) [101100000] imm_7[6..0] */
+ kThumbAndRR, /* and [0100000000] rm[5..3] rd[2..0] */
+ kThumbAsrRRI5, /* asr(1) [00010] imm_5[10..6] rm[5..3] rd[2..0] */
+ kThumbAsrRR, /* asr(2) [0100000100] rs[5..3] rd[2..0] */
+ kThumbBCond, /* b(1) [1101] cond[11..8] offset_8[7..0] */
+ kThumbBUncond, /* b(2) [11100] offset_11[10..0] */
+ kThumbBicRR, /* bic [0100001110] rm[5..3] rd[2..0] */
+ kThumbBkpt, /* bkpt [10111110] imm_8[7..0] */
+ kThumbBlx1, /* blx(1) [111] H[10] offset_11[10..0] */
+ kThumbBlx2, /* blx(1) [111] H[01] offset_11[10..0] */
+ kThumbBl1, /* blx(1) [111] H[10] offset_11[10..0] */
+ kThumbBl2, /* blx(1) [111] H[11] offset_11[10..0] */
+ kThumbBlxR, /* blx(2) [010001111] rm[6..3] [000] */
+ kThumbBx, /* bx [010001110] H2[6..6] rm[5..3] SBZ[000] */
+ kThumbCmnRR, /* cmn [0100001011] rm[5..3] rd[2..0] */
+ kThumbCmpRI8, /* cmp(1) [00101] rn[10..8] imm_8[7..0] */
+ kThumbCmpRR, /* cmp(2) [0100001010] rm[5..3] rd[2..0] */
+ kThumbCmpLH, /* cmp(3) [01000101] H12[01] rm[5..3] rd[2..0] */
+ kThumbCmpHL, /* cmp(3) [01000110] H12[10] rm[5..3] rd[2..0] */
+ kThumbCmpHH, /* cmp(3) [01000111] H12[11] rm[5..3] rd[2..0] */
+ kThumbEorRR, /* eor [0100000001] rm[5..3] rd[2..0] */
+ kThumbLdmia, /* ldmia [11001] rn[10..8] reglist [7..0] */
+ kThumbLdrRRI5, /* ldr(1) [01101] imm_5[10..6] rn[5..3] rd[2..0] */
+ kThumbLdrRRR, /* ldr(2) [0101100] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbLdrPcRel, /* ldr(3) [01001] rd[10..8] imm_8[7..0] */
+ kThumbLdrSpRel, /* ldr(4) [10011] rd[10..8] imm_8[7..0] */
+ kThumbLdrbRRI5, /* ldrb(1) [01111] imm_5[10..6] rn[5..3] rd[2..0] */
+ kThumbLdrbRRR, /* ldrb(2) [0101110] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbLdrhRRI5, /* ldrh(1) [10001] imm_5[10..6] rn[5..3] rd[2..0] */
+ kThumbLdrhRRR, /* ldrh(2) [0101101] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbLdrsbRRR, /* ldrsb [0101011] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbLdrshRRR, /* ldrsh [0101111] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbLslRRI5, /* lsl(1) [00000] imm_5[10..6] rm[5..3] rd[2..0] */
+ kThumbLslRR, /* lsl(2) [0100000010] rs[5..3] rd[2..0] */
+ kThumbLsrRRI5, /* lsr(1) [00001] imm_5[10..6] rm[5..3] rd[2..0] */
+ kThumbLsrRR, /* lsr(2) [0100000011] rs[5..3] rd[2..0] */
+ kThumbMovImm, /* mov(1) [00100] rd[10..8] imm_8[7..0] */
+ kThumbMovRR, /* mov(2) [0001110000] rn[5..3] rd[2..0] */
+ kThumbMovRR_H2H, /* mov(3) [01000111] H12[11] rm[5..3] rd[2..0] */
+ kThumbMovRR_H2L, /* mov(3) [01000110] H12[01] rm[5..3] rd[2..0] */
+ kThumbMovRR_L2H, /* mov(3) [01000101] H12[10] rm[5..3] rd[2..0] */
+ kThumbMul, /* mul [0100001101] rm[5..3] rd[2..0] */
+ kThumbMvn, /* mvn [0100001111] rm[5..3] rd[2..0] */
+ kThumbNeg, /* neg [0100001001] rm[5..3] rd[2..0] */
+ kThumbOrr, /* orr [0100001100] rm[5..3] rd[2..0] */
+ kThumbPop, /* pop [1011110] r[8..8] rl[7..0] */
+ kThumbPush, /* push [1011010] r[8..8] rl[7..0] */
+ kThumbRorR, /* ror [0100000111] rs[5..3] rd[2..0] */
+ kThumbSbc, /* sbc [0100000110] rm[5..3] rd[2..0] */
+ kThumbStmia, /* stmia [11000] rn[10..8] reglist [7.. 0] */
+ kThumbStrRRI5, /* str(1) [01100] imm_5[10..6] rn[5..3] rd[2..0] */
+ kThumbStrRRR, /* str(2) [0101000] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbStrSpRel, /* str(3) [10010] rd[10..8] imm_8[7..0] */
+ kThumbStrbRRI5, /* strb(1) [01110] imm_5[10..6] rn[5..3] rd[2..0] */
+ kThumbStrbRRR, /* strb(2) [0101010] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbStrhRRI5, /* strh(1) [10000] imm_5[10..6] rn[5..3] rd[2..0] */
+ kThumbStrhRRR, /* strh(2) [0101001] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbSubRRI3, /* sub(1) [0001111] imm_3[8..6] rn[5..3] rd[2..0]*/
+ kThumbSubRI8, /* sub(2) [00111] rd[10..8] imm_8[7..0] */
+ kThumbSubRRR, /* sub(3) [0001101] rm[8..6] rn[5..3] rd[2..0] */
+ kThumbSubSpI7, /* sub(4) [101100001] imm_7[6..0] */
+ kThumbSwi, /* swi [11011111] imm_8[7..0] */
+ kThumbTst, /* tst [0100001000] rm[5..3] rn[2..0] */
+ kThumb2Vldrs, /* vldr low sx [111011011001] rn[19..16] rd[15-12]
+ [1010] imm_8[7..0] */
+ kThumb2Vldrd, /* vldr low dx [111011011001] rn[19..16] rd[15-12]
+ [1011] imm_8[7..0] */
+ kThumb2Vmuls, /* vmul vd, vn, vm [111011100010] rn[19..16]
+ rd[15-12] [10100000] rm[3..0] */
+ kThumb2Vmuld, /* vmul vd, vn, vm [111011100010] rn[19..16]
+ rd[15-12] [10110000] rm[3..0] */
+ kThumb2Vstrs, /* vstr low sx [111011011000] rn[19..16] rd[15-12]
+ [1010] imm_8[7..0] */
+ kThumb2Vstrd, /* vstr low dx [111011011000] rn[19..16] rd[15-12]
+ [1011] imm_8[7..0] */
+ kThumb2Vsubs, /* vsub vd, vn, vm [111011100011] rn[19..16]
+ rd[15-12] [10100040] rm[3..0] */
+ kThumb2Vsubd, /* vsub vd, vn, vm [111011100011] rn[19..16]
+ rd[15-12] [10110040] rm[3..0] */
+ kThumb2Vadds, /* vadd vd, vn, vm [111011100011] rn[19..16]
+ rd[15-12] [10100000] rm[3..0] */
+ kThumb2Vaddd, /* vadd vd, vn, vm [111011100011] rn[19..16]
+ rd[15-12] [10110000] rm[3..0] */
+ kThumb2Vdivs, /* vdiv vd, vn, vm [111011101000] rn[19..16]
+ rd[15-12] [10100000] rm[3..0] */
+ kThumb2Vdivd, /* vdiv vd, vn, vm [111011101000] rn[19..16]
+ rd[15-12] [10110000] rm[3..0] */
+ kThumb2VcvtIF, /* vcvt.F32 vd, vm [1110111010111000] vd[15..12]
+ [10101100] vm[3..0] */
+ kThumb2VcvtID, /* vcvt.F64 vd, vm [1110111010111000] vd[15..12]
[10111100] vm[3..0] */
- THUMB2_VCVTFI, /* vcvt.S32.F32 vd, vm [1110111010111101] vd[15..12]
+ kThumb2VcvtFI, /* vcvt.S32.F32 vd, vm [1110111010111101] vd[15..12]
[10101100] vm[3..0] */
- THUMB2_VCVTDI, /* vcvt.S32.F32 vd, vm [1110111010111101] vd[15..12]
+ kThumb2VcvtDI, /* vcvt.S32.F32 vd, vm [1110111010111101] vd[15..12]
[10111100] vm[3..0] */
- THUMB2_VCVTFD, /* vcvt.F64.F32 vd, vm [1110111010110111] vd[15..12]
+ kThumb2VcvtFd, /* vcvt.F64.F32 vd, vm [1110111010110111] vd[15..12]
[10101100] vm[3..0] */
- THUMB2_VCVTDF, /* vcvt.F32.F64 vd, vm [1110111010110111] vd[15..12]
+ kThumb2VcvtDF, /* vcvt.F32.F64 vd, vm [1110111010110111] vd[15..12]
[10111100] vm[3..0] */
- THUMB2_VSQRTS, /* vsqrt.f32 vd, vm [1110111010110001] vd[15..12]
+ kThumb2Vsqrts, /* vsqrt.f32 vd, vm [1110111010110001] vd[15..12]
[10101100] vm[3..0] */
- THUMB2_VSQRTD, /* vsqrt.f64 vd, vm [1110111010110001] vd[15..12]
+ kThumb2Vsqrtd, /* vsqrt.f64 vd, vm [1110111010110001] vd[15..12]
[10111100] vm[3..0] */
- THUMB2_MOV_IMM_SHIFT, /* mov(T2) rd, #<const> [11110] i [00001001111]
+ kThumb2MovImmShift, /* mov(T2) rd, #<const> [11110] i [00001001111]
imm3 rd[11..8] imm8 */
- THUMB2_MOV_IMM16, /* mov(T3) rd, #<const> [11110] i [0010100] imm4 [0]
+ kThumb2MovImm16, /* mov(T3) rd, #<const> [11110] i [0010100] imm4 [0]
imm3 rd[11..8] imm8 */
- THUMB2_STR_RRI12, /* str(Imm,T3) rd,[rn,#imm12] [111110001100]
+ kThumb2StrRRI12, /* str(Imm,T3) rd,[rn,#imm12] [111110001100]
rn[19..16] rt[15..12] imm12[11..0] */
- THUMB2_LDR_RRI12, /* str(Imm,T3) rd,[rn,#imm12] [111110001100]
+ kThumb2LdrRRI12, /* str(Imm,T3) rd,[rn,#imm12] [111110001100]
rn[19..16] rt[15..12] imm12[11..0] */
- THUMB2_STR_RRI8_PREDEC, /* str(Imm,T4) rd,[rn,#-imm8] [111110000100]
+ kThumb2StrRRI8Predec, /* str(Imm,T4) rd,[rn,#-imm8] [111110000100]
rn[19..16] rt[15..12] [1100] imm[7..0]*/
- THUMB2_LDR_RRI8_PREDEC, /* ldr(Imm,T4) rd,[rn,#-imm8] [111110000101]
+ kThumb2LdrRRI8Predec, /* ldr(Imm,T4) rd,[rn,#-imm8] [111110000101]
rn[19..16] rt[15..12] [1100] imm[7..0]*/
- THUMB2_CBNZ, /* cbnz rd,<label> [101110] i [1] imm5[7..3]
+ kThumb2Cbnz, /* cbnz rd,<label> [101110] i [1] imm5[7..3]
rn[2..0] */
- THUMB2_CBZ, /* cbn rd,<label> [101100] i [1] imm5[7..3]
+ kThumb2Cbz, /* cbn rd,<label> [101100] i [1] imm5[7..3]
rn[2..0] */
- THUMB2_ADD_RRI12, /* add rd, rn, #imm12 [11110] i [100000] rn[19..16]
+ kThumb2AddRRI12, /* add rd, rn, #imm12 [11110] i [100000] rn[19..16]
[0] imm3[14..12] rd[11..8] imm8[7..0] */
- THUMB2_MOV_RR, /* mov rd, rm [11101010010011110000] rd[11..8]
+ kThumb2MovRR, /* mov rd, rm [11101010010011110000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_VMOVS, /* vmov.f32 vd, vm [111011101] D [110000]
+ kThumb2Vmovs, /* vmov.f32 vd, vm [111011101] D [110000]
vd[15..12] 101001] M [0] vm[3..0] */
- THUMB2_VMOVD, /* vmov.f64 vd, vm [111011101] D [110000]
+ kThumb2Vmovd, /* vmov.f64 vd, vm [111011101] D [110000]
vd[15..12] 101101] M [0] vm[3..0] */
- THUMB2_LDMIA, /* ldmia [111010001001[ rn[19..16] mask[15..0] */
- THUMB2_STMIA, /* stmia [111010001000[ rn[19..16] mask[15..0] */
- THUMB2_ADD_RRR, /* add [111010110000] rn[19..16] [0000] rd[11..8]
+ kThumb2Ldmia, /* ldmia [111010001001[ rn[19..16] mask[15..0] */
+ kThumb2Stmia, /* stmia [111010001000[ rn[19..16] mask[15..0] */
+ kThumb2AddRRR, /* add [111010110000] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_SUB_RRR, /* sub [111010111010] rn[19..16] [0000] rd[11..8]
+ kThumb2SubRRR, /* sub [111010111010] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_SBC_RRR, /* sbc [111010110110] rn[19..16] [0000] rd[11..8]
+ kThumb2SbcRRR, /* sbc [111010110110] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_CMP_RR, /* cmp [111010111011] rn[19..16] [0000] [1111]
+ kThumb2CmpRR, /* cmp [111010111011] rn[19..16] [0000] [1111]
[0000] rm[3..0] */
- THUMB2_SUB_RRI12, /* sub rd, rn, #imm12 [11110] i [01010] rn[19..16]
+ kThumb2SubRRI12, /* sub rd, rn, #imm12 [11110] i [01010] rn[19..16]
[0] imm3[14..12] rd[11..8] imm8[7..0] */
- THUMB2_MVN_IMM_SHIFT, /* mov(T2) rd, #<const> [11110] i [00011011110]
+ kThumb2MvnImmShift, /* mov(T2) rd, #<const> [11110] i [00011011110]
imm3 rd[11..8] imm8 */
- THUMB2_SEL, /* sel rd, rn, rm [111110101010] rn[19-16] rd[11-8]
+ kThumb2Sel, /* sel rd, rn, rm [111110101010] rn[19-16] rd[11-8]
rm[3-0] */
- THUMB2_UBFX, /* ubfx rd,rn,#lsb,#width [111100111100] rn[19..16]
+ kThumb2Ubfx, /* ubfx rd,rn,#lsb,#width [111100111100] rn[19..16]
[0] imm3[14-12] rd[11-8] w[4-0] */
- THUMB2_SBFX, /* ubfx rd,rn,#lsb,#width [111100110100] rn[19..16]
+ kThumb2Sbfx, /* ubfx rd,rn,#lsb,#width [111100110100] rn[19..16]
[0] imm3[14-12] rd[11-8] w[4-0] */
- THUMB2_LDR_RRR, /* ldr rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
+ kThumb2LdrRRR, /* ldr rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_LDRH_RRR, /* ldrh rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
+ kThumb2LdrhRRR, /* ldrh rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_LDRSH_RRR, /* ldrsh rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
+ kThumb2LdrshRRR, /* ldrsh rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_LDRB_RRR, /* ldrb rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
+ kThumb2LdrbRRR, /* ldrb rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_LDRSB_RRR, /* ldrsb rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
+ kThumb2LdrsbRRR, /* ldrsb rt,[rn,rm,LSL #imm] [111110000101] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_STR_RRR, /* str rt,[rn,rm,LSL #imm] [111110000100] rn[19-16]
+ kThumb2StrRRR, /* str rt,[rn,rm,LSL #imm] [111110000100] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_STRH_RRR, /* str rt,[rn,rm,LSL #imm] [111110000010] rn[19-16]
+ kThumb2StrhRRR, /* str rt,[rn,rm,LSL #imm] [111110000010] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_STRB_RRR, /* str rt,[rn,rm,LSL #imm] [111110000000] rn[19-16]
+ kThumb2StrbRRR, /* str rt,[rn,rm,LSL #imm] [111110000000] rn[19-16]
rt[15-12] [000000] imm[5-4] rm[3-0] */
- THUMB2_LDRH_RRI12, /* ldrh rt,[rn,#imm12] [111110001011]
+ kThumb2LdrhRRI12, /* ldrh rt,[rn,#imm12] [111110001011]
rt[15..12] rn[19..16] imm12[11..0] */
- THUMB2_LDRSH_RRI12, /* ldrsh rt,[rn,#imm12] [111110011011]
+ kThumb2LdrshRRI12, /* ldrsh rt,[rn,#imm12] [111110011011]
rt[15..12] rn[19..16] imm12[11..0] */
- THUMB2_LDRB_RRI12, /* ldrb rt,[rn,#imm12] [111110001001]
+ kThumb2LdrbRRI12, /* ldrb rt,[rn,#imm12] [111110001001]
rt[15..12] rn[19..16] imm12[11..0] */
- THUMB2_LDRSB_RRI12, /* ldrsb rt,[rn,#imm12] [111110011001]
+ kThumb2LdrsbRRI12, /* ldrsb rt,[rn,#imm12] [111110011001]
rt[15..12] rn[19..16] imm12[11..0] */
- THUMB2_STRH_RRI12, /* strh rt,[rn,#imm12] [111110001010]
+ kThumb2StrhRRI12, /* strh rt,[rn,#imm12] [111110001010]
rt[15..12] rn[19..16] imm12[11..0] */
- THUMB2_STRB_RRI12, /* strb rt,[rn,#imm12] [111110001000]
+ kThumb2StrbRRI12, /* strb rt,[rn,#imm12] [111110001000]
rt[15..12] rn[19..16] imm12[11..0] */
- THUMB2_POP, /* pop [1110100010111101] list[15-0]*/
- THUMB2_PUSH, /* push [1110100010101101] list[15-0]*/
- THUMB2_CMP_RI8, /* cmp rn, #<const> [11110] i [011011] rn[19-16] [0]
+ kThumb2Pop, /* pop [1110100010111101] list[15-0]*/
+ kThumb2Push, /* push [1110100010101101] list[15-0]*/
+ kThumb2CmpRI8, /* cmp rn, #<const> [11110] i [011011] rn[19-16] [0]
imm3 [1111] imm8[7..0] */
- THUMB2_ADC_RRR, /* adc [111010110101] rn[19..16] [0000] rd[11..8]
+ kThumb2AdcRRR, /* adc [111010110101] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_AND_RRR, /* and [111010100000] rn[19..16] [0000] rd[11..8]
+ kThumb2AndRRR, /* and [111010100000] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_BIC_RRR, /* bic [111010100010] rn[19..16] [0000] rd[11..8]
+ kThumb2BicRRR, /* bic [111010100010] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_CMN_RR, /* cmn [111010110001] rn[19..16] [0000] [1111]
+ kThumb2CmnRR, /* cmn [111010110001] rn[19..16] [0000] [1111]
[0000] rm[3..0] */
- THUMB2_EOR_RRR, /* eor [111010101000] rn[19..16] [0000] rd[11..8]
+ kThumb2EorRRR, /* eor [111010101000] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_MUL_RRR, /* mul [111110110000] rn[19..16] [1111] rd[11..8]
+ kThumb2MulRRR, /* mul [111110110000] rn[19..16] [1111] rd[11..8]
[0000] rm[3..0] */
- THUMB2_MVN_RR, /* mvn [11101010011011110] rd[11-8] [0000]
+ kThumb2MnvRR, /* mvn [11101010011011110] rd[11-8] [0000]
rm[3..0] */
- THUMB2_RSUB_RRI8, /* rsub [111100011100] rn[19..16] [0000] rd[11..8]
+ kThumb2RsubRRI8, /* rsub [111100011100] rn[19..16] [0000] rd[11..8]
imm8[7..0] */
- THUMB2_NEG_RR, /* actually rsub rd, rn, #0 */
- THUMB2_ORR_RRR, /* orr [111010100100] rn[19..16] [0000] rd[11..8]
+ kThumb2NegRR, /* actually rsub rd, rn, #0 */
+ kThumb2OrrRRR, /* orr [111010100100] rn[19..16] [0000] rd[11..8]
[0000] rm[3..0] */
- THUMB2_TST_RR, /* tst [111010100001] rn[19..16] [0000] [1111]
+ kThumb2TstRR, /* tst [111010100001] rn[19..16] [0000] [1111]
[0000] rm[3..0] */
- THUMB2_LSL_RRR, /* lsl [111110100000] rn[19..16] [1111] rd[11..8]
+ kThumb2LslRRR, /* lsl [111110100000] rn[19..16] [1111] rd[11..8]
[0000] rm[3..0] */
- THUMB2_LSR_RRR, /* lsr [111110100010] rn[19..16] [1111] rd[11..8]
+ kThumb2LsrRRR, /* lsr [111110100010] rn[19..16] [1111] rd[11..8]
[0000] rm[3..0] */
- THUMB2_ASR_RRR, /* asr [111110100100] rn[19..16] [1111] rd[11..8]
+ kThumb2AsrRRR, /* asr [111110100100] rn[19..16] [1111] rd[11..8]
[0000] rm[3..0] */
- THUMB2_ROR_RRR, /* ror [111110100110] rn[19..16] [1111] rd[11..8]
+ kThumb2RorRRR, /* ror [111110100110] rn[19..16] [1111] rd[11..8]
[0000] rm[3..0] */
- THUMB2_LSL_RRI5, /* lsl [11101010010011110] imm[14.12] rd[11..8]
+ kThumb2LslRRI5, /* lsl [11101010010011110] imm[14.12] rd[11..8]
[00] rm[3..0] */
- THUMB2_LSR_RRI5, /* lsr [11101010010011110] imm[14.12] rd[11..8]
+ kThumb2LsrRRI5, /* lsr [11101010010011110] imm[14.12] rd[11..8]
[01] rm[3..0] */
- THUMB2_ASR_RRI5, /* asr [11101010010011110] imm[14.12] rd[11..8]
+ kThumb2AsrRRI5, /* asr [11101010010011110] imm[14.12] rd[11..8]
[10] rm[3..0] */
- THUMB2_ROR_RRI5, /* ror [11101010010011110] imm[14.12] rd[11..8]
+ kThumb2RorRRI5, /* ror [11101010010011110] imm[14.12] rd[11..8]
[11] rm[3..0] */
- THUMB2_BIC_RRI8, /* bic [111100000010] rn[19..16] [0] imm3
+ kThumb2BicRRI8, /* bic [111100000010] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_AND_RRI8, /* bic [111100000000] rn[19..16] [0] imm3
+ kThumb2AndRRI8, /* bic [111100000000] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_ORR_RRI8, /* orr [111100000100] rn[19..16] [0] imm3
+ kThumb2OrrRRI8, /* orr [111100000100] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_EOR_RRI8, /* eor [111100001000] rn[19..16] [0] imm3
+ kThumb2EorRRI8, /* eor [111100001000] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_ADD_RRI8, /* add [111100001000] rn[19..16] [0] imm3
+ kThumb2AddRRI8, /* add [111100001000] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_ADC_RRI8, /* adc [111100010101] rn[19..16] [0] imm3
+ kThumb2AdcRRI8, /* adc [111100010101] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_SUB_RRI8, /* sub [111100011011] rn[19..16] [0] imm3
+ kThumb2SubRRI8, /* sub [111100011011] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_SBC_RRI8, /* sbc [111100010111] rn[19..16] [0] imm3
+ kThumb2SbcRRI8, /* sbc [111100010111] rn[19..16] [0] imm3
rd[11..8] imm8 */
- THUMB2_IT, /* it [10111111] firstcond[7-4] mask[3-0] */
- THUMB2_FMSTAT, /* fmstat [11101110111100011111101000010000] */
- THUMB2_VCMPD, /* vcmp [111011101] D [11011] rd[15-12] [1011]
+ kThumb2It, /* it [10111111] firstcond[7-4] mask[3-0] */
+ kThumb2Fmstat, /* fmstat [11101110111100011111101000010000] */
+ kThumb2Vcmpd, /* vcmp [111011101] D [11011] rd[15-12] [1011]
E [1] M [0] rm[3-0] */
- THUMB2_VCMPS, /* vcmp [111011101] D [11010] rd[15-12] [1011]
+ kThumb2Vcmps, /* vcmp [111011101] D [11010] rd[15-12] [1011]
E [1] M [0] rm[3-0] */
- THUMB2_LDR_PC_REL12, /* ldr rd,[pc,#imm12] [1111100011011111] rt[15-12]
- imm12[11-0] */
- THUMB2_B_COND, /* b<c> [1110] S cond[25-22] imm6[21-16] [10]
+ kThumb2LdrPcRel12, /* ldr rd,[pc,#imm12] [1111100011011111] rt[15-12]
+ imm12[11-0] */
+ kThumb2BCond, /* b<c> [1110] S cond[25-22] imm6[21-16] [10]
J1 [0] J2 imm11[10..0] */
- THUMB2_VMOVD_RR, /* vmov [111011101] D [110000] vd[15-12 [101101]
+ kThumb2Vmovd_RR, /* vmov [111011101] D [110000] vd[15-12 [101101]
M [0] vm[3-0] */
- THUMB2_VMOVS_RR, /* vmov [111011101] D [110000] vd[15-12 [101001]
+ kThumb2Vmovs_RR, /* vmov [111011101] D [110000] vd[15-12 [101001]
M [0] vm[3-0] */
- THUMB2_FMRS, /* vmov [111011100000] vn[19-16] rt[15-12] [1010]
+ kThumb2Fmrs, /* vmov [111011100000] vn[19-16] rt[15-12] [1010]
N [0010000] */
- THUMB2_FMSR, /* vmov [111011100001] vn[19-16] rt[15-12] [1010]
+ kThumb2Fmsr, /* vmov [111011100001] vn[19-16] rt[15-12] [1010]
N [0010000] */
- THUMB2_FMRRD, /* vmov [111011000100] rt2[19-16] rt[15-12]
+ kThumb2Fmrrd, /* vmov [111011000100] rt2[19-16] rt[15-12]
[101100] M [1] vm[3-0] */
- THUMB2_FMDRR, /* vmov [111011000101] rt2[19-16] rt[15-12]
+ kThumb2Fmdrr, /* vmov [111011000101] rt2[19-16] rt[15-12]
[101100] M [1] vm[3-0] */
+ kThumb2Vabsd, /* vabs.f64 [111011101] D [110000] rd[15-12]
+ [1011110] M [0] vm[3-0] */
+ kThumb2Vabss, /* vabs.f32 [111011101] D [110000] rd[15-12]
+ [1010110] M [0] vm[3-0] */
+ kThumb2Vnegd, /* vneg.f64 [111011101] D [110000] rd[15-12]
+ [1011110] M [0] vm[3-0] */
+ kThumb2Vnegs, /* vneg.f32 [111011101] D [110000] rd[15-12]
+ [1010110] M [0] vm[3-0] */
+ kThumb2Vmovs_IMM8, /* vmov.f32 [111011101] D [11] imm4h[19-16] vd[15-12]
+ [10100000] imm4l[3-0] */
+ kThumb2Vmovd_IMM8, /* vmov.f64 [111011101] D [11] imm4h[19-16] vd[15-12]
+ [10110000] imm4l[3-0] */
+ kThumb2Mla, /* mla [111110110000] rn[19-16] ra[15-12] rd[7-4]
+ [0000] rm[3-0] */
+ kThumb2Umull, /* umull [111110111010] rn[19-16], rdlo[15-12]
+ rdhi[11-8] [0000] rm[3-0] */
+ kThumb2Ldrex, /* ldrex [111010000101] rn[19-16] rt[11-8] [1111]
+ imm8[7-0] */
+ kThumb2Strex, /* strex [111010000100] rn[19-16] rt[11-8] rd[11-8]
+ imm8[7-0] */
+ kThumb2Clrex, /* clrex [111100111011111110000111100101111\ */
- ARM_LAST,
+ kArmLast,
} ArmOpCode;
/* Bit flags describing the behavior of each native opcode */
@@ -541,6 +615,7 @@
kRegUse0,
kRegUse1,
kRegUse2,
+ kRegUse3,
kRegUseSP,
kRegUsePC,
kRegUseList0,
@@ -565,6 +640,7 @@
#define REG_USE0 (1 << kRegUse0)
#define REG_USE1 (1 << kRegUse1)
#define REG_USE2 (1 << kRegUse2)
+#define REG_USE3 (1 << kRegUse3)
#define REG_USE_SP (1 << kRegUseSP)
#define REG_USE_PC (1 << kRegUsePC)
#define REG_USE_LIST0 (1 << kRegUseList0)
@@ -590,19 +666,20 @@
/* Instruction assembly fieldLoc kind */
typedef enum ArmEncodingKind {
- UNUSED,
- BITBLT, /* Bit string using end/start */
- DFP, /* Double FP reg */
- SFP, /* Single FP reg */
- MODIMM, /* Shifted 8-bit immediate using [26,14..12,7..0] */
- IMM16, /* Zero-extended immediate using [26,19..16,14..12,7..0] */
- IMM6, /* Encoded branch target using [9,7..3]0 */
- IMM12, /* Zero-extended immediate using [26,14..12,7..0] */
- SHIFT, /* Shift descriptor, [14..12,7..4] */
- LSB, /* least significant bit using [14..12][7..6] */
- BWIDTH, /* bit-field width, encoded as width-1 */
- SHIFT5, /* Shift count, [14..12,7..6] */
- BROFFSET, /* Signed extended [26,11,13,21-16,10-0]:0 */
+ kFmtUnused,
+ kFmtBitBlt, /* Bit string using end/start */
+ kFmtDfp, /* Double FP reg */
+ kFmtSfp, /* Single FP reg */
+ kFmtModImm, /* Shifted 8-bit immed using [26,14..12,7..0] */
+ kFmtImm16, /* Zero-extended immed using [26,19..16,14..12,7..0] */
+ kFmtImm6, /* Encoded branch target using [9,7..3]0 */
+ kFmtImm12, /* Zero-extended immediate using [26,14..12,7..0] */
+ kFmtShift, /* Shift descriptor, [14..12,7..4] */
+ kFmtLsb, /* least significant bit using [14..12][7..6] */
+ kFmtBWidth, /* bit-field width, encoded as width-1 */
+ kFmtShift5, /* Shift count, [14..12,7..6] */
+ kFmtBrOffset, /* Signed extended [26,11,13,21-16,10-0]:0 */
+ kFmtFPImm, /* Encoded floating point immediate */
} ArmEncodingKind;
/* Struct used to define the snippet positions for each Thumb opcode */
@@ -610,8 +687,8 @@
u4 skeleton;
struct {
ArmEncodingKind kind;
- int end; /* end for BITBLT, 1-bit slice end for FP regs */
- int start; /* start for BITBLT, 4-bit slice end for FP regs */
+ int end; /* end for kFmtBitBlt, 1-bit slice end for FP regs */
+ int start; /* start for kFmtBitBlt, 4-bit slice end for FP regs */
} fieldLoc[4];
ArmOpCode opCode;
int flags;
@@ -620,7 +697,7 @@
int size;
} ArmEncodingMap;
-extern ArmEncodingMap EncodingMap[ARM_LAST];
+extern ArmEncodingMap EncodingMap[kArmLast];
/*
* Each instance of this struct holds a pseudo or real LIR instruction: