# CS_ARCH_ARM, CS_MODE_ARM, None
0x18,0x0e,0xc0,0xf2 = vmov.i8 d16, #0x8
0x10,0x08,0xc1,0xf2 = vmov.i16 d16, #0x10
0x10,0x0a,0xc1,0xf2 = vmov.i16 d16, #0x1000
0x10,0x00,0xc2,0xf2 = vmov.i32 d16, #0x20
0x10,0x02,0xc2,0xf2 = vmov.i32 d16, #0x2000
0x10,0x04,0xc2,0xf2 = vmov.i32 d16, #0x200000
0x10,0x06,0xc2,0xf2 = vmov.i32 d16, #0x20000000
0x10,0x0c,0xc2,0xf2 = vmov.i32 d16, #0x20ff
0x10,0x0d,0xc2,0xf2 = vmov.i32 d16, #0x20ffff
0x33,0x0e,0xc1,0xf3 = vmov.i64 d16, #0xff0000ff0000ffff
0x58,0x0e,0xc0,0xf2 = vmov.i8 q8, #0x8
0x50,0x08,0xc1,0xf2 = vmov.i16 q8, #0x10
0x50,0x0a,0xc1,0xf2 = vmov.i16 q8, #0x1000
0x50,0x00,0xc2,0xf2 = vmov.i32 q8, #0x20
0x50,0x02,0xc2,0xf2 = vmov.i32 q8, #0x2000
0x50,0x04,0xc2,0xf2 = vmov.i32 q8, #0x200000
0x50,0x06,0xc2,0xf2 = vmov.i32 q8, #0x20000000
0x50,0x0c,0xc2,0xf2 = vmov.i32 q8, #0x20ff
0x50,0x0d,0xc2,0xf2 = vmov.i32 q8, #0x20ffff
0x73,0x0e,0xc1,0xf3 = vmov.i64 q8, #0xff0000ff0000ffff
0x30,0x08,0xc1,0xf2 = vmvn.i16 d16, #0x10
0x30,0x0a,0xc1,0xf2 = vmvn.i16 d16, #0x1000
0x30,0x00,0xc2,0xf2 = vmvn.i32 d16, #0x20
0x30,0x02,0xc2,0xf2 = vmvn.i32 d16, #0x2000
0x30,0x04,0xc2,0xf2 = vmvn.i32 d16, #0x200000
0x30,0x06,0xc2,0xf2 = vmvn.i32 d16, #0x20000000
0x30,0x0c,0xc2,0xf2 = vmvn.i32 d16, #0x20ff
0x30,0x0d,0xc2,0xf2 = vmvn.i32 d16, #0x20ffff
0x30,0x0a,0xc8,0xf2 = vmovl.s8 q8, d16
0x30,0x0a,0xd0,0xf2 = vmovl.s16 q8, d16
0x30,0x0a,0xe0,0xf2 = vmovl.s32 q8, d16
0x30,0x0a,0xc8,0xf3 = vmovl.u8 q8, d16
0x30,0x0a,0xd0,0xf3 = vmovl.u16 q8, d16
0x30,0x0a,0xe0,0xf3 = vmovl.u32 q8, d16
0x20,0x02,0xf2,0xf3 = vmovn.i16 d16, q8
0x20,0x02,0xf6,0xf3 = vmovn.i32 d16, q8
0x20,0x02,0xfa,0xf3 = vmovn.i64 d16, q8
0xa0,0x02,0xf2,0xf3 = vqmovn.s16 d16, q8
0xa0,0x02,0xf6,0xf3 = vqmovn.s32 d16, q8
0xa0,0x02,0xfa,0xf3 = vqmovn.s64 d16, q8
0xe0,0x02,0xf2,0xf3 = vqmovn.u16 d16, q8
0xe0,0x02,0xf6,0xf3 = vqmovn.u32 d16, q8
0xe0,0x02,0xfa,0xf3 = vqmovn.u64 d16, q8
0x60,0x02,0xf2,0xf3 = vqmovun.s16 d16, q8
0x60,0x02,0xf6,0xf3 = vqmovun.s32 d16, q8
0x60,0x02,0xfa,0xf3 = vqmovun.s64 d16, q8
0xb0,0x0b,0x50,0xee = vmov.s8 r0, d16[1]
0xf0,0x0b,0x10,0xee = vmov.s16 r0, d16[1]
0xb0,0x0b,0xd0,0xee = vmov.u8 r0, d16[1]
0xf0,0x0b,0x90,0xee = vmov.u16 r0, d16[1]
0x90,0x0b,0x30,0xee = vmov.32 r0, d16[1]
0xb0,0x1b,0x40,0xee = vmov.8 d16[1], r1
0xf0,0x1b,0x00,0xee = vmov.16 d16[1], r1
0x90,0x1b,0x20,0xee = vmov.32 d16[1], r1
0xb0,0x1b,0x42,0xee = vmov.8 d18[1], r1
0xf0,0x1b,0x02,0xee = vmov.16 d18[1], r1
0x90,0x1b,0x22,0xee = vmov.32 d18[1], r1
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
0x82,0x15,0xb0,0xf3 = vmvn d1, d2
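# A minimal sketch (kept as comments so this stays a plain test-vector file) of how one
# of the vectors above can be checked by hand with Capstone's Python binding, assuming
# the capstone package is installed; the arch/mode constants mirror the header line.
#
#     from capstone import Cs, CS_ARCH_ARM, CS_MODE_ARM
#
#     md = Cs(CS_ARCH_ARM, CS_MODE_ARM)         # same arch/mode as the header line
#     code = bytes([0x18, 0x0e, 0xc0, 0xf2])    # byte sequence from the first vector
#     for insn in md.disasm(code, 0):
#         # should reproduce the expected text of the first vector: vmov.i8 d16, #0x8
#         print(insn.mnemonic, insn.op_str)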