    /*
     * Array put, 64 bits.  vBB[vCC] <- vAA.
     *
     * Arrays of long/double are 64-bit aligned, so it's okay to use a single
     * 64-bit store.
     */
    /* aput-wide vAA, vBB, vCC */
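    /*
     * Encoding note (assuming the standard Dalvik 23x format "AA|op CC|BB"):
     * AA sits in the high byte of the first code unit, and the second unit
     * packs CC in its high byte and BB in its low byte, which is why
     * FETCH(a0, 1) below yields CCBB and GET_OPA(t0) yields AA.
     */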
    FETCH(a0, 1)                           #  a0 <- CCBB
    GET_OPA(t0)                            #  t0 <- AA
    and       a2, a0, 255                  #  a2 <- BB
    srl       a3, a0, 8                    #  a3 <- CC
    GET_VREG(a0, a2)                       #  a0 <- vBB (array object)
    GET_VREG(a1, a3)                       #  a1 <- vCC (requested index)
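    # Note: GET_VREG fetches a single 32-bit vreg; the 64-bit source vAA is
    # handled below by taking its frame address with EAS2 and loading both
    # halves at once with LOAD64.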
    # null array object?
    beqz      a0, common_errNullObject     #  yes, bail
    LOAD_base_offMirrorArray_length(a3, a0) #  a3 <- arrayObj->length
    EAS3(a0, a0, a1)                       #  a0 <- arrayObj + index*width
    EAS2(rOBJ, rFP, t0)                    #  rOBJ <- &fp[AA]
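    # The unsigned compare below also rejects negative indices, which wrap
    # to large unsigned values.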
    # compare unsigned index, length
    bgeu      a1, a3, common_errArrayIndex #  index >= length, bail

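    # What follows is the usual mterp advance-and-dispatch tail: step rPC past
    # this two-unit instruction, copy the wide value into the array element,
    # then jump to the next opcode's handler (presumably via the handler table
    # that GOTO_OPCODE indexes with the opcode in t0).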
    FETCH_ADVANCE_INST(2)                  #  advance rPC, load rINST
    LOAD64(a2, a3, rOBJ)                   #  a2/a3 <- vAA/vAA+1
    GET_INST_OPCODE(t0)                    #  extract opcode from rINST
    STORE64_off(a2, a3, a0, MIRROR_WIDE_ARRAY_DATA_OFFSET) #  vBB[vCC] <- a2/a3
    GOTO_OPCODE(t0)                        #  jump to next instruction