/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_mips.S"

#include "arch/quick_alloc_entrypoints.S"

    .set noreorder
    .balign 4

    /* Deliver the given exception */
    .extern artDeliverExceptionFromCode
    /* Deliver an exception pending on a thread */
    .extern artDeliverPendingExceptionFromCode

#define ARG_SLOT_SIZE 32    // space for a0-a3 plus 4 more words
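
// Illustrative arithmetic (commentary only, not original to this file): the o32 calling
// convention makes the caller reserve home space for $a0-$a3, and the frames below round
// that up to ARG_SLOT_SIZE = 32 bytes so C helpers taking a few extra stack arguments can
// be called without a second $sp adjustment. A kSaveRefsOnly frame thus ends up reserving
// FRAME_SIZE_SAVE_REFS_ONLY (48) + ARG_SLOT_SIZE (32) = 80 bytes below the caller's $sp.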

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves)
     * Callee-save: $s0-$s8 + $gp + $ra, 11 total + 1 word for Method*
     * Clobbers $t0 and $sp
     * Allocates ARG_SLOT_SIZE bytes at the bottom of the stack for arg slots.
     * Reserves FRAME_SIZE_SAVE_ALL_CALLEE_SAVES + ARG_SLOT_SIZE bytes on the stack
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    addiu  $sp, $sp, -112
    .cfi_adjust_cfa_offset 112

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 112)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(MIPS) size not as expected."
#endif

    sw     $ra, 108($sp)
    .cfi_rel_offset 31, 108
    sw     $s8, 104($sp)
    .cfi_rel_offset 30, 104
    sw     $gp, 100($sp)
    .cfi_rel_offset 28, 100
    sw     $s7, 96($sp)
    .cfi_rel_offset 23, 96
    sw     $s6, 92($sp)
    .cfi_rel_offset 22, 92
    sw     $s5, 88($sp)
    .cfi_rel_offset 21, 88
    sw     $s4, 84($sp)
    .cfi_rel_offset 20, 84
    sw     $s3, 80($sp)
    .cfi_rel_offset 19, 80
    sw     $s2, 76($sp)
    .cfi_rel_offset 18, 76
    sw     $s1, 72($sp)
    .cfi_rel_offset 17, 72
    sw     $s0, 68($sp)
    .cfi_rel_offset 16, 68
    // 4-byte placeholder for register $zero, serving for alignment
    // of the following double precision floating point registers.

    CHECK_ALIGNMENT $sp, $t1
    sdc1   $f30, 56($sp)
    sdc1   $f28, 48($sp)
    sdc1   $f26, 40($sp)
    sdc1   $f24, 32($sp)
    sdc1   $f22, 24($sp)
    sdc1   $f20, 16($sp)

    # 1 word for holding Method* plus 12 bytes of padding to keep the new value of SP
    # a multiple of 16.

    lw     $t0, %got(_ZN3art7Runtime9instance_E)($gp)
    lw     $t0, 0($t0)
    lw     $t0, RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET($t0)
    sw     $t0, 0($sp)                                # Place Method* at bottom of stack.
    sw     $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)  # Place sp in Thread::Current()->top_quick_frame.
    addiu  $sp, $sp, -ARG_SLOT_SIZE                   # reserve argument slots on the stack
    .cfi_adjust_cfa_offset ARG_SLOT_SIZE
.endm
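
    /*
     * Illustrative summary (commentary only) of the 112-byte kSaveAllCalleeSaves frame
     * built above, with offsets taken from the decremented $sp, before the extra
     * ARG_SLOT_SIZE adjustment:
     *   108: $ra   104: $s8   100: $gp   96: $s7   92: $s6   88: $s5   84: $s4
     *    80: $s3    76: $s2    72: $s1   68: $s0   64: alignment placeholder
     *    56, 48, 40, 32, 24, 16: $f30/$f31 down to $f20/$f21 (six doubles)
     *    4-15: padding (12 bytes)   0: ArtMethod*
     */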

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly). Restoration assumes non-moving GC.
     * Does not include rSUSPEND or rSELF
     * callee-save: $s2-$s8 + $gp + $ra, 9 total + 2 words padding + 1 word to hold Method*
     * Clobbers $t0 and $sp
     * Allocates ARG_SLOT_SIZE bytes at the bottom of the stack for arg slots.
     * Reserves FRAME_SIZE_SAVE_REFS_ONLY + ARG_SLOT_SIZE bytes on the stack
     */
.macro SETUP_SAVE_REFS_ONLY_FRAME
    addiu  $sp, $sp, -48
    .cfi_adjust_cfa_offset 48

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 48)
#error "FRAME_SIZE_SAVE_REFS_ONLY(MIPS) size not as expected."
#endif

    sw     $ra, 44($sp)
    .cfi_rel_offset 31, 44
    sw     $s8, 40($sp)
    .cfi_rel_offset 30, 40
    sw     $gp, 36($sp)
    .cfi_rel_offset 28, 36
    sw     $s7, 32($sp)
    .cfi_rel_offset 23, 32
    sw     $s6, 28($sp)
    .cfi_rel_offset 22, 28
    sw     $s5, 24($sp)
    .cfi_rel_offset 21, 24
    sw     $s4, 20($sp)
    .cfi_rel_offset 20, 20
    sw     $s3, 16($sp)
    .cfi_rel_offset 19, 16
    sw     $s2, 12($sp)
    .cfi_rel_offset 18, 12
    # 2 words for alignment and bottom word will hold Method*

    lw     $t0, %got(_ZN3art7Runtime9instance_E)($gp)
    lw     $t0, 0($t0)
    lw     $t0, RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET($t0)
    sw     $t0, 0($sp)                                # Place Method* at bottom of stack.
    sw     $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)  # Place sp in Thread::Current()->top_quick_frame.
    addiu  $sp, $sp, -ARG_SLOT_SIZE                   # reserve argument slots on the stack
    .cfi_adjust_cfa_offset ARG_SLOT_SIZE
.endm

.macro RESTORE_SAVE_REFS_ONLY_FRAME
    addiu  $sp, $sp, ARG_SLOT_SIZE                    # remove argument slots on the stack
    .cfi_adjust_cfa_offset -ARG_SLOT_SIZE
    lw     $ra, 44($sp)
    .cfi_restore 31
    lw     $s8, 40($sp)
    .cfi_restore 30
    lw     $gp, 36($sp)
    .cfi_restore 28
    lw     $s7, 32($sp)
    .cfi_restore 23
    lw     $s6, 28($sp)
    .cfi_restore 22
    lw     $s5, 24($sp)
    .cfi_restore 21
    lw     $s4, 20($sp)
    .cfi_restore 20
    lw     $s3, 16($sp)
    .cfi_restore 19
    lw     $s2, 12($sp)
    .cfi_restore 18
    addiu  $sp, $sp, 48
    .cfi_adjust_cfa_offset -48
.endm

.macro RESTORE_SAVE_REFS_ONLY_FRAME_AND_RETURN
    RESTORE_SAVE_REFS_ONLY_FRAME
    jalr   $zero, $ra
    nop
.endm
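
    /*
     * Illustrative summary (commentary only) of the 48-byte kSaveRefsOnly frame used by
     * the macros above, with offsets taken from the decremented $sp, before the extra
     * ARG_SLOT_SIZE adjustment:
     *   44: $ra   40: $s8   36: $gp   32: $s7   28: $s6   24: $s5
     *   20: $s4   16: $s3   12: $s2   4-11: padding (2 words)   0: ArtMethod*
     */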
170
171 /*
Alexey Frunze279cfba2017-07-22 00:24:43 -0700172 * Individually usable part of macro SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY.
173 */
174.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_S4_THRU_S8
175 sw $s8, 104($sp)
176 .cfi_rel_offset 30, 104
177 sw $s7, 96($sp)
178 .cfi_rel_offset 23, 96
179 sw $s6, 92($sp)
180 .cfi_rel_offset 22, 92
181 sw $s5, 88($sp)
182 .cfi_rel_offset 21, 88
183 sw $s4, 84($sp)
184 .cfi_rel_offset 20, 84
185.endm
186
187 /*
buzbee5bc5a7b2012-03-07 15:52:59 -0800188 * Macro that sets up the callee save frame to conform with
Vladimir Markofd36f1f2016-08-03 18:49:58 +0100189 * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs).
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800190 * callee-save: $a1-$a3, $t0-$t1, $s2-$s8, $gp, $ra, $f8-$f19
191 * (26 total + 1 word padding + method*)
buzbee5bc5a7b2012-03-07 15:52:59 -0800192 */
Alexey Frunze279cfba2017-07-22 00:24:43 -0700193.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY save_s4_thru_s8=1
194 addiu $sp, $sp, -112
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800195 .cfi_adjust_cfa_offset 112
Andreas Gampe5c1e4352014-04-21 19:28:24 -0700196
197 // Ugly compile-time check, but we only have the preprocessor.
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800198#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 112)
Vladimir Markofd36f1f2016-08-03 18:49:58 +0100199#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(MIPS) size not as expected."
Andreas Gampe5c1e4352014-04-21 19:28:24 -0700200#endif
201
Alexey Frunze279cfba2017-07-22 00:24:43 -0700202 sw $ra, 108($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800203 .cfi_rel_offset 31, 108
Alexey Frunze279cfba2017-07-22 00:24:43 -0700204 sw $gp, 100($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800205 .cfi_rel_offset 28, 100
Alexey Frunze279cfba2017-07-22 00:24:43 -0700206 .if \save_s4_thru_s8
207 SETUP_SAVE_REFS_AND_ARGS_FRAME_S4_THRU_S8
208 .endif
209 sw $s3, 80($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800210 .cfi_rel_offset 19, 80
Alexey Frunze279cfba2017-07-22 00:24:43 -0700211 sw $s2, 76($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800212 .cfi_rel_offset 18, 76
Alexey Frunze279cfba2017-07-22 00:24:43 -0700213 sw $t1, 72($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800214 .cfi_rel_offset 9, 72
Alexey Frunze279cfba2017-07-22 00:24:43 -0700215 sw $t0, 68($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800216 .cfi_rel_offset 8, 68
Alexey Frunze279cfba2017-07-22 00:24:43 -0700217 sw $a3, 64($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800218 .cfi_rel_offset 7, 64
Alexey Frunze279cfba2017-07-22 00:24:43 -0700219 sw $a2, 60($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800220 .cfi_rel_offset 6, 60
Alexey Frunze279cfba2017-07-22 00:24:43 -0700221 sw $a1, 56($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800222 .cfi_rel_offset 5, 56
Chris Larsen715f43e2017-10-23 11:00:32 -0700223 CHECK_ALIGNMENT $sp, $t8
224 sdc1 $f18, 48($sp)
225 sdc1 $f16, 40($sp)
226 sdc1 $f14, 32($sp)
227 sdc1 $f12, 24($sp)
228 sdc1 $f10, 16($sp)
229 sdc1 $f8, 8($sp)
jeffhaofa147e22012-10-12 17:03:32 -0700230 # bottom will hold Method*
Douglas Leung735b8552014-10-31 12:21:40 -0700231.endm
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700232
Douglas Leung735b8552014-10-31 12:21:40 -0700233 /*
234 * Macro that sets up the callee save frame to conform with
Vladimir Markofd36f1f2016-08-03 18:49:58 +0100235 * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs). Restoration assumes non-moving GC.
Alexey Frunze1b8464d2016-11-12 17:22:05 -0800236 * callee-save: $a1-$a3, $t0-$t1, $s2-$s8, $gp, $ra, $f8-$f19
237 * (26 total + 1 word padding + method*)
Douglas Leung735b8552014-10-31 12:21:40 -0700238 * Clobbers $t0 and $sp
239 * Allocates ARG_SLOT_SIZE bytes at the bottom of the stack for arg slots.
Vladimir Markofd36f1f2016-08-03 18:49:58 +0100240 * Reserves FRAME_SIZE_SAVE_REFS_AND_ARGS + ARG_SLOT_SIZE bytes on the stack
Douglas Leung735b8552014-10-31 12:21:40 -0700241 */
Alexey Frunze279cfba2017-07-22 00:24:43 -0700242.macro SETUP_SAVE_REFS_AND_ARGS_FRAME save_s4_thru_s8_only=0
243 .if \save_s4_thru_s8_only
244 // It is expected that `SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY /* save_s4_thru_s8 */ 0`
245 // has been done prior to `SETUP_SAVE_REFS_AND_ARGS_FRAME /* save_s4_thru_s8_only */ 1`.
246 SETUP_SAVE_REFS_AND_ARGS_FRAME_S4_THRU_S8
247 .else
248 SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
249 .endif
Douglas Leung4af77b72014-10-22 16:32:28 -0700250 lw $t0, %got(_ZN3art7Runtime9instance_E)($gp)
251 lw $t0, 0($t0)
Vladimir Markofd36f1f2016-08-03 18:49:58 +0100252 lw $t0, RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET($t0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700253 sw $t0, 0($sp) # Place Method* at bottom of stack.
254 sw $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame.
Douglas Leung735b8552014-10-31 12:21:40 -0700255 addiu $sp, $sp, -ARG_SLOT_SIZE # reserve argument slots on the stack
256 .cfi_adjust_cfa_offset ARG_SLOT_SIZE
257.endm
258
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs). Restoration assumes non-moving GC.
     * callee-save: $a1-$a3, $t0-$t1, $s2-$s8, $gp, $ra, $f8-$f19
     * (26 total + 1 word padding + method*)
     * Clobbers $sp
     * Uses $a0 as the Method* and stores it at the bottom of the stack.
     * Allocates ARG_SLOT_SIZE bytes at the bottom of the stack for arg slots.
     * Reserves FRAME_SIZE_SAVE_REFS_AND_ARGS + ARG_SLOT_SIZE bytes on the stack
     */
.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0
    SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
    sw     $a0, 0($sp)                                # Place Method* at bottom of stack.
    sw     $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)  # Place sp in Thread::Current()->top_quick_frame.
    addiu  $sp, $sp, -ARG_SLOT_SIZE                   # reserve argument slots on the stack
    .cfi_adjust_cfa_offset ARG_SLOT_SIZE
.endm

    /*
     * Individually usable part of macro RESTORE_SAVE_REFS_AND_ARGS_FRAME.
     */
.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME_GP
    lw     $gp, 100($sp)
    .cfi_restore 28
.endm

    /*
     * Individually usable part of macro RESTORE_SAVE_REFS_AND_ARGS_FRAME.
     */
.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME_A1
    lw     $a1, 56($sp)
    .cfi_restore 5
.endm

.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME restore_s4_thru_s8=1, remove_arg_slots=1
    .if \remove_arg_slots
      addiu  $sp, $sp, ARG_SLOT_SIZE                  # Remove argument slots from the stack.
      .cfi_adjust_cfa_offset -ARG_SLOT_SIZE
    .endif
    lw     $ra, 108($sp)
    .cfi_restore 31
    .if \restore_s4_thru_s8
      lw     $s8, 104($sp)
      .cfi_restore 30
    .endif
    RESTORE_SAVE_REFS_AND_ARGS_FRAME_GP
    .if \restore_s4_thru_s8
      lw     $s7, 96($sp)
      .cfi_restore 23
      lw     $s6, 92($sp)
      .cfi_restore 22
      lw     $s5, 88($sp)
      .cfi_restore 21
      lw     $s4, 84($sp)
      .cfi_restore 20
    .endif
    lw     $s3, 80($sp)
    .cfi_restore 19
    lw     $s2, 76($sp)
    .cfi_restore 18
    lw     $t1, 72($sp)
    .cfi_restore 9
    lw     $t0, 68($sp)
    .cfi_restore 8
    lw     $a3, 64($sp)
    .cfi_restore 7
    lw     $a2, 60($sp)
    .cfi_restore 6
    RESTORE_SAVE_REFS_AND_ARGS_FRAME_A1
    CHECK_ALIGNMENT $sp, $t8
    ldc1   $f18, 48($sp)
    ldc1   $f16, 40($sp)
    ldc1   $f14, 32($sp)
    ldc1   $f12, 24($sp)
    ldc1   $f10, 16($sp)
    ldc1   $f8, 8($sp)
    addiu  $sp, $sp, 112                              # Pop frame.
    .cfi_adjust_cfa_offset -112
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything),
     * when the $sp has already been decremented by FRAME_SIZE_SAVE_EVERYTHING.
     * Callee-save: $at, $v0-$v1, $a0-$a3, $t0-$t7, $s0-$s7, $t8-$t9, $gp, $fp, $ra, $f0-$f31;
     * 28 (GPR) + 32 (FPR) + 3 words for padding and 1 word for Method*
     * Clobbers $t0 and $t1.
     * Allocates ARG_SLOT_SIZE bytes at the bottom of the stack for arg slots.
     * Reserves FRAME_SIZE_SAVE_EVERYTHING + ARG_SLOT_SIZE bytes on the stack.
     * This macro sets up $gp; entrypoints using it should start with ENTRY_NO_GP.
     */
.macro SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET
    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_EVERYTHING != 256)
#error "FRAME_SIZE_SAVE_EVERYTHING(MIPS) size not as expected."
#endif

    sw     $ra, 252($sp)
    .cfi_rel_offset 31, 252
    sw     $fp, 248($sp)
    .cfi_rel_offset 30, 248
    sw     $gp, 244($sp)
    .cfi_rel_offset 28, 244
    sw     $t9, 240($sp)
    .cfi_rel_offset 25, 240
    sw     $t8, 236($sp)
    .cfi_rel_offset 24, 236
    sw     $s7, 232($sp)
    .cfi_rel_offset 23, 232
    sw     $s6, 228($sp)
    .cfi_rel_offset 22, 228
    sw     $s5, 224($sp)
    .cfi_rel_offset 21, 224
    sw     $s4, 220($sp)
    .cfi_rel_offset 20, 220
    sw     $s3, 216($sp)
    .cfi_rel_offset 19, 216
    sw     $s2, 212($sp)
    .cfi_rel_offset 18, 212
    sw     $s1, 208($sp)
    .cfi_rel_offset 17, 208
    sw     $s0, 204($sp)
    .cfi_rel_offset 16, 204
    sw     $t7, 200($sp)
    .cfi_rel_offset 15, 200
    sw     $t6, 196($sp)
    .cfi_rel_offset 14, 196
    sw     $t5, 192($sp)
    .cfi_rel_offset 13, 192
    sw     $t4, 188($sp)
    .cfi_rel_offset 12, 188
    sw     $t3, 184($sp)
    .cfi_rel_offset 11, 184
    sw     $t2, 180($sp)
    .cfi_rel_offset 10, 180
    sw     $t1, 176($sp)
    .cfi_rel_offset 9, 176
    sw     $t0, 172($sp)
    .cfi_rel_offset 8, 172
    sw     $a3, 168($sp)
    .cfi_rel_offset 7, 168
    sw     $a2, 164($sp)
    .cfi_rel_offset 6, 164
    sw     $a1, 160($sp)
    .cfi_rel_offset 5, 160
    sw     $a0, 156($sp)
    .cfi_rel_offset 4, 156
    sw     $v1, 152($sp)
    .cfi_rel_offset 3, 152
    sw     $v0, 148($sp)
    .cfi_rel_offset 2, 148

    // Set up $gp, clobbering $ra and using the branch delay slot for a useful instruction.
    bal    1f
    .set push
    .set noat
    sw     $at, 144($sp)
    .cfi_rel_offset 1, 144
    .set pop
1:
    .cpload $ra

    CHECK_ALIGNMENT $sp, $t1
    sdc1   $f30, 136($sp)
    sdc1   $f28, 128($sp)
    sdc1   $f26, 120($sp)
    sdc1   $f24, 112($sp)
    sdc1   $f22, 104($sp)
    sdc1   $f20, 96($sp)
    sdc1   $f18, 88($sp)
    sdc1   $f16, 80($sp)
    sdc1   $f14, 72($sp)
    sdc1   $f12, 64($sp)
    sdc1   $f10, 56($sp)
    sdc1   $f8, 48($sp)
    sdc1   $f6, 40($sp)
    sdc1   $f4, 32($sp)
    sdc1   $f2, 24($sp)
    sdc1   $f0, 16($sp)

    # 3 words padding and 1 word for holding Method*

    lw     $t0, %got(_ZN3art7Runtime9instance_E)($gp)
    lw     $t0, 0($t0)
    lw     $t0, \runtime_method_offset($t0)
    sw     $t0, 0($sp)                                # Place Method* at bottom of stack.
    sw     $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)  # Place sp in Thread::Current()->top_quick_frame.
    addiu  $sp, $sp, -ARG_SLOT_SIZE                   # reserve argument slots on the stack
    .cfi_adjust_cfa_offset ARG_SLOT_SIZE
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything).
     * Callee-save: $at, $v0-$v1, $a0-$a3, $t0-$t7, $s0-$s7, $t8-$t9, $gp, $fp, $ra, $f0-$f31;
     * 28 (GPR) + 32 (FPR) + 3 words for padding and 1 word for Method*
     * Clobbers $t0 and $t1.
     * Allocates ARG_SLOT_SIZE bytes at the bottom of the stack for arg slots.
     * Reserves FRAME_SIZE_SAVE_EVERYTHING + ARG_SLOT_SIZE bytes on the stack.
     * This macro sets up $gp; entrypoints using it should start with ENTRY_NO_GP.
     */
.macro SETUP_SAVE_EVERYTHING_FRAME runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET
    addiu  $sp, $sp, -(FRAME_SIZE_SAVE_EVERYTHING)
    .cfi_adjust_cfa_offset (FRAME_SIZE_SAVE_EVERYTHING)
    SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP \runtime_method_offset
.endm
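
    /*
     * Usage sketch (commentary only; the entrypoint and C helper names here are made up).
     * The save-everything frame is what slow paths that may need every caller register
     * reconstructed build, e.g. the throw entrypoints further below:
     *
     *   ENTRY_NO_GP art_quick_throw_something
     *       SETUP_SAVE_EVERYTHING_FRAME            # spills all GPRs/FPRs + Method*
     *       la     $t9, artThrowSomethingFromCode
     *       jalr   $zero, $t9                      # artThrowSomethingFromCode(Thread*)
     *       move   $a0, rSELF                      # pass Thread::Current (delay slot)
     *   END art_quick_throw_something
     */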

.macro RESTORE_SAVE_EVERYTHING_FRAME restore_a0=1
    addiu  $sp, $sp, ARG_SLOT_SIZE                    # remove argument slots on the stack
    .cfi_adjust_cfa_offset -ARG_SLOT_SIZE

    CHECK_ALIGNMENT $sp, $t1
    ldc1   $f30, 136($sp)
    ldc1   $f28, 128($sp)
    ldc1   $f26, 120($sp)
    ldc1   $f24, 112($sp)
    ldc1   $f22, 104($sp)
    ldc1   $f20, 96($sp)
    ldc1   $f18, 88($sp)
    ldc1   $f16, 80($sp)
    ldc1   $f14, 72($sp)
    ldc1   $f12, 64($sp)
    ldc1   $f10, 56($sp)
    ldc1   $f8, 48($sp)
    ldc1   $f6, 40($sp)
    ldc1   $f4, 32($sp)
    ldc1   $f2, 24($sp)
    ldc1   $f0, 16($sp)

    lw     $ra, 252($sp)
    .cfi_restore 31
    lw     $fp, 248($sp)
    .cfi_restore 30
    lw     $gp, 244($sp)
    .cfi_restore 28
    lw     $t9, 240($sp)
    .cfi_restore 25
    lw     $t8, 236($sp)
    .cfi_restore 24
    lw     $s7, 232($sp)
    .cfi_restore 23
    lw     $s6, 228($sp)
    .cfi_restore 22
    lw     $s5, 224($sp)
    .cfi_restore 21
    lw     $s4, 220($sp)
    .cfi_restore 20
    lw     $s3, 216($sp)
    .cfi_restore 19
    lw     $s2, 212($sp)
    .cfi_restore 18
    lw     $s1, 208($sp)
    .cfi_restore 17
    lw     $s0, 204($sp)
    .cfi_restore 16
    lw     $t7, 200($sp)
    .cfi_restore 15
    lw     $t6, 196($sp)
    .cfi_restore 14
    lw     $t5, 192($sp)
    .cfi_restore 13
    lw     $t4, 188($sp)
    .cfi_restore 12
    lw     $t3, 184($sp)
    .cfi_restore 11
    lw     $t2, 180($sp)
    .cfi_restore 10
    lw     $t1, 176($sp)
    .cfi_restore 9
    lw     $t0, 172($sp)
    .cfi_restore 8
    lw     $a3, 168($sp)
    .cfi_restore 7
    lw     $a2, 164($sp)
    .cfi_restore 6
    lw     $a1, 160($sp)
    .cfi_restore 5
    .if \restore_a0
      lw     $a0, 156($sp)
      .cfi_restore 4
    .endif
    lw     $v1, 152($sp)
    .cfi_restore 3
    lw     $v0, 148($sp)
    .cfi_restore 2
    .set push
    .set noat
    lw     $at, 144($sp)
    .cfi_restore 1
    .set pop

    addiu  $sp, $sp, 256                              # pop frame
    .cfi_adjust_cfa_offset -256
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_ when the runtime method frame is ready.
     * Requires $gp properly set up.
     */
.macro DELIVER_PENDING_EXCEPTION_FRAME_READY
    la     $t9, artDeliverPendingExceptionFromCode
    jalr   $zero, $t9                                 # artDeliverPendingExceptionFromCode(Thread*)
    move   $a0, rSELF                                 # pass Thread::Current
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     * Requires $gp properly set up.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME                 # save callee saves for throw
    DELIVER_PENDING_EXCEPTION_FRAME_READY
.endm

.macro RETURN_IF_NO_EXCEPTION
    lw     $t0, THREAD_EXCEPTION_OFFSET(rSELF)        # load Thread::Current()->exception_
    RESTORE_SAVE_REFS_ONLY_FRAME
    bnez   $t0, 1f                                    # deliver the exception if one is pending
    nop
    jalr   $zero, $ra
    nop
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_IF_ZERO
    RESTORE_SAVE_REFS_ONLY_FRAME
    bnez   $v0, 1f                                    # success?
    nop
    jalr   $zero, $ra                                 # return on success
    nop
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
    RESTORE_SAVE_REFS_ONLY_FRAME
    beqz   $v0, 1f                                    # success?
    nop
    jalr   $zero, $ra                                 # return on success
    nop
1:
    DELIVER_PENDING_EXCEPTION
.endm
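
    /*
     * Usage sketch (commentary only; "art_quick_do_something" and "artDoSomethingFromCode"
     * are made-up names). A typical refs-only entrypoint builds the frame, calls its C
     * helper with Thread::Current() as the trailing argument, and then uses one of the
     * RETURN_* macros above to either return or deliver the pending exception:
     *
     *   ENTRY art_quick_do_something
     *       SETUP_SAVE_REFS_ONLY_FRAME             # save callee saves in case of GC
     *       la     $t9, artDoSomethingFromCode
     *       jalr   $t9                             # (arg, Thread*)
     *       move   $a1, rSELF                      # pass Thread::Current (delay slot)
     *       RETURN_IF_ZERO
     *   END art_quick_do_something
     */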

    /*
     * On stack replacement stub.
     * On entry:
     *   a0 = stack to copy
     *   a1 = size of stack
     *   a2 = pc to call
     *   a3 = JValue* result
     *   [sp + 16] = shorty
     *   [sp + 20] = thread
     */
ENTRY art_quick_osr_stub
    // Save callee general purpose registers, RA and GP.
    addiu  $sp, $sp, -48
    .cfi_adjust_cfa_offset 48
    sw     $ra, 44($sp)
    .cfi_rel_offset 31, 44
    sw     $s8, 40($sp)
    .cfi_rel_offset 30, 40
    sw     $gp, 36($sp)
    .cfi_rel_offset 28, 36
    sw     $s7, 32($sp)
    .cfi_rel_offset 23, 32
    sw     $s6, 28($sp)
    .cfi_rel_offset 22, 28
    sw     $s5, 24($sp)
    .cfi_rel_offset 21, 24
    sw     $s4, 20($sp)
    .cfi_rel_offset 20, 20
    sw     $s3, 16($sp)
    .cfi_rel_offset 19, 16
    sw     $s2, 12($sp)
    .cfi_rel_offset 18, 12
    sw     $s1, 8($sp)
    .cfi_rel_offset 17, 8
    sw     $s0, 4($sp)
    .cfi_rel_offset 16, 4

    move   $s8, $sp                   # Save the stack pointer
    move   $s7, $a1                   # Save size of stack
    move   $s6, $a2                   # Save the pc to call
    lw     rSELF, 48+20($sp)          # Save managed thread pointer into rSELF
    addiu  $t0, $sp, -12              # Reserve space for stack pointer,
                                      # JValue* result, and ArtMethod* slot.
    srl    $t0, $t0, 4                # Align stack pointer to 16 bytes
    sll    $sp, $t0, 4                # Update stack pointer
    sw     $s8, 4($sp)                # Save old stack pointer
    sw     $a3, 8($sp)                # Save JValue* result
    sw     $zero, 0($sp)              # Store null for ArtMethod* at bottom of frame
    subu   $sp, $a1                   # Reserve space for callee stack
    move   $a2, $a1
    move   $a1, $a0
    move   $a0, $sp
    la     $t9, memcpy
    jalr   $t9                        # memcpy (dest a0, src a1, bytes a2)
    addiu  $sp, $sp, -16              # make space for argument slots for memcpy
    bal    .Losr_entry                # Call the method
    addiu  $sp, $sp, 16               # restore stack after memcpy
    lw     $a2, 8($sp)                # Restore JValue* result
    lw     $sp, 4($sp)                # Restore saved stack pointer
    lw     $a0, 48+16($sp)            # load shorty
    lbu    $a0, 0($a0)                # load return type
    li     $a1, 'D'                   # put char 'D' into a1
    beq    $a0, $a1, .Losr_fp_result  # Test if result type char == 'D'
    li     $a1, 'F'                   # put char 'F' into a1
    beq    $a0, $a1, .Losr_fp_result  # Test if result type char == 'F'
    nop
    sw     $v0, 0($a2)
    b      .Losr_exit
    sw     $v1, 4($a2)                # store v0/v1 into result
.Losr_fp_result:
    CHECK_ALIGNMENT $a2, $t0, 8
    sdc1   $f0, 0($a2)                # store f0/f1 into result
.Losr_exit:
    lw     $ra, 44($sp)
    .cfi_restore 31
    lw     $s8, 40($sp)
    .cfi_restore 30
    lw     $gp, 36($sp)
    .cfi_restore 28
    lw     $s7, 32($sp)
    .cfi_restore 23
    lw     $s6, 28($sp)
    .cfi_restore 22
    lw     $s5, 24($sp)
    .cfi_restore 21
    lw     $s4, 20($sp)
    .cfi_restore 20
    lw     $s3, 16($sp)
    .cfi_restore 19
    lw     $s2, 12($sp)
    .cfi_restore 18
    lw     $s1, 8($sp)
    .cfi_restore 17
    lw     $s0, 4($sp)
    .cfi_restore 16
    jalr   $zero, $ra
    addiu  $sp, $sp, 48
    .cfi_adjust_cfa_offset -48
.Losr_entry:
    addiu  $s7, $s7, -4
    addu   $t0, $s7, $sp
    move   $t9, $s6
    jalr   $zero, $t9
    sw     $ra, 0($t0)                # Store RA per the compiler ABI
END art_quick_osr_stub

    /*
     * On entry $a0 is uint32_t* gprs_ and $a1 is uint32_t* fprs_.
     * Note that fprs_ is expected to be an address that is a multiple of 8.
     * FIXME: just guessing about the shape of the jmpbuf. Where will pc be?
     */
ENTRY art_quick_do_long_jump
    CHECK_ALIGNMENT $a1, $t1, 8
    ldc1   $f0, 0*8($a1)
    ldc1   $f2, 1*8($a1)
    ldc1   $f4, 2*8($a1)
    ldc1   $f6, 3*8($a1)
    ldc1   $f8, 4*8($a1)
    ldc1   $f10, 5*8($a1)
    ldc1   $f12, 6*8($a1)
    ldc1   $f14, 7*8($a1)
    ldc1   $f16, 8*8($a1)
    ldc1   $f18, 9*8($a1)
    ldc1   $f20, 10*8($a1)
    ldc1   $f22, 11*8($a1)
    ldc1   $f24, 12*8($a1)
    ldc1   $f26, 13*8($a1)
    ldc1   $f28, 14*8($a1)
    ldc1   $f30, 15*8($a1)

    .set push
    .set nomacro
    .set noat
    lw     $at, 4($a0)
    .set pop
    lw     $v0, 8($a0)
    lw     $v1, 12($a0)
    lw     $a1, 20($a0)
    lw     $a2, 24($a0)
    lw     $a3, 28($a0)
    lw     $t0, 32($a0)
    lw     $t1, 36($a0)
    lw     $t2, 40($a0)
    lw     $t3, 44($a0)
    lw     $t4, 48($a0)
    lw     $t5, 52($a0)
    lw     $t6, 56($a0)
    lw     $t7, 60($a0)
    lw     $s0, 64($a0)
    lw     $s1, 68($a0)
    lw     $s2, 72($a0)
    lw     $s3, 76($a0)
    lw     $s4, 80($a0)
    lw     $s5, 84($a0)
    lw     $s6, 88($a0)
    lw     $s7, 92($a0)
    lw     $t8, 96($a0)
    lw     $t9, 100($a0)
    lw     $gp, 112($a0)
    lw     $sp, 116($a0)
    lw     $fp, 120($a0)
    lw     $ra, 124($a0)
    lw     $a0, 16($a0)
    move   $v0, $zero                 # clear result registers v0 and v1 (in branch delay slot)
    jalr   $zero, $t9                 # do long jump
    move   $v1, $zero
END art_quick_do_long_jump

    /*
     * Called by managed code, saves most registers (forms basis of long jump context) and passes
     * the bottom of the stack. artDeliverExceptionFromCode will place the callee save Method* at
     * the bottom of the thread. On entry a0 holds Throwable*
     */
ENTRY art_quick_deliver_exception
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    la     $t9, artDeliverExceptionFromCode
    jalr   $zero, $t9                 # artDeliverExceptionFromCode(Throwable*, Thread*)
    move   $a1, rSELF                 # pass Thread::Current
END art_quick_deliver_exception

    /*
     * Called by managed code to create and deliver a NullPointerException
     */
    .extern artThrowNullPointerExceptionFromCode
ENTRY_NO_GP art_quick_throw_null_pointer_exception
    // Note that setting up $gp does not rely on $t9 here, so branching here directly is OK,
    // even after clobbering any registers we don't need to preserve, such as $gp or $t0.
    SETUP_SAVE_EVERYTHING_FRAME
    la     $t9, artThrowNullPointerExceptionFromCode
    jalr   $zero, $t9                 # artThrowNullPointerExceptionFromCode(Thread*)
    move   $a0, rSELF                 # pass Thread::Current
END art_quick_throw_null_pointer_exception


    /*
     * Call installed by a signal handler to create and deliver a NullPointerException.
     */
    .extern artThrowNullPointerExceptionFromSignal
ENTRY_NO_GP_CUSTOM_CFA art_quick_throw_null_pointer_exception_from_signal, FRAME_SIZE_SAVE_EVERYTHING
    SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP
    # Retrieve the fault address from the padding where the signal handler stores it.
    lw     $a0, (ARG_SLOT_SIZE + __SIZEOF_POINTER__)($sp)
    la     $t9, artThrowNullPointerExceptionFromSignal
    jalr   $zero, $t9                 # artThrowNullPointerExceptionFromSignal(uintptr_t, Thread*)
    move   $a1, rSELF                 # pass Thread::Current
END art_quick_throw_null_pointer_exception_from_signal

    /*
     * Called by managed code to create and deliver an ArithmeticException
     */
    .extern artThrowDivZeroFromCode
ENTRY_NO_GP art_quick_throw_div_zero
    SETUP_SAVE_EVERYTHING_FRAME
    la     $t9, artThrowDivZeroFromCode
    jalr   $zero, $t9                 # artThrowDivZeroFromCode(Thread*)
    move   $a0, rSELF                 # pass Thread::Current
END art_quick_throw_div_zero

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException
     */
    .extern artThrowArrayBoundsFromCode
ENTRY_NO_GP art_quick_throw_array_bounds
    // Note that setting up $gp does not rely on $t9 here, so branching here directly is OK,
    // even after clobbering any registers we don't need to preserve, such as $gp or $t0.
    SETUP_SAVE_EVERYTHING_FRAME
    la     $t9, artThrowArrayBoundsFromCode
    jalr   $zero, $t9                 # artThrowArrayBoundsFromCode(index, limit, Thread*)
    move   $a2, rSELF                 # pass Thread::Current
END art_quick_throw_array_bounds

    /*
     * Called by managed code to create and deliver a StringIndexOutOfBoundsException
     * as if thrown from a call to String.charAt().
     */
    .extern artThrowStringBoundsFromCode
ENTRY_NO_GP art_quick_throw_string_bounds
    SETUP_SAVE_EVERYTHING_FRAME
    la     $t9, artThrowStringBoundsFromCode
    jalr   $zero, $t9                 # artThrowStringBoundsFromCode(index, limit, Thread*)
    move   $a2, rSELF                 # pass Thread::Current
END art_quick_throw_string_bounds

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
    .extern artThrowStackOverflowFromCode
ENTRY art_quick_throw_stack_overflow
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    la     $t9, artThrowStackOverflowFromCode
    jalr   $zero, $t9                 # artThrowStackOverflowFromCode(Thread*)
    move   $a0, rSELF                 # pass Thread::Current
END art_quick_throw_stack_overflow

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/$a0 with the target Method*, arg0/$a0 will contain
     * the method_idx. This wrapper will save arg1-arg3, and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/$a1.
     *
     * The helper will attempt to locate the target and return a 64-bit result in $v0/$v1 consisting
     * of the target Method* in $v0 and method->code_ in $v1.
     *
     * If unsuccessful, the helper will return null/null. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving $ra
     * pointing back to the original caller.
     */
.macro INVOKE_TRAMPOLINE_BODY cxx_name, save_s4_thru_s8_only=0
    .extern \cxx_name
    SETUP_SAVE_REFS_AND_ARGS_FRAME \save_s4_thru_s8_only  # save callee saves in case
                                                          # allocation triggers GC
    move   $a2, rSELF                 # pass Thread::Current
    la     $t9, \cxx_name
    jalr   $t9                        # (method_idx, this, Thread*, $sp)
    addiu  $a3, $sp, ARG_SLOT_SIZE    # pass $sp (remove arg slots)
    move   $a0, $v0                   # save target Method*
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    beqz   $v0, 1f
    move   $t9, $v1                   # save $v0->code_
    jalr   $zero, $t9
    nop
1:
    DELIVER_PENDING_EXCEPTION
.endm
.macro INVOKE_TRAMPOLINE c_name, cxx_name
ENTRY \c_name
    INVOKE_TRAMPOLINE_BODY \cxx_name
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck

// Each of the following macros expands into four instructions or 16 bytes.
// They are used to build indexable "tables" of code.

.macro LOAD_WORD_TO_REG reg, next_arg, index_reg, label
    lw     $\reg, -4($\next_arg)      # next_arg points to argument after the current one (offset is 4)
    b      \label
    addiu  $\index_reg, 16
    .balign 16
.endm

.macro LOAD_LONG_TO_REG reg1, reg2, next_arg, index_reg, next_index, label
    lw     $\reg1, -8($\next_arg)     # next_arg points to argument after the current one (offset is 8)
    lw     $\reg2, -4($\next_arg)
    b      \label
    li     $\index_reg, \next_index
    .balign 16
.endm

.macro LOAD_FLOAT_TO_REG reg, next_arg, index_reg, label
    lwc1   $\reg, -4($\next_arg)      # next_arg points to argument after the current one (offset is 4)
    b      \label
    addiu  $\index_reg, 16
    .balign 16
.endm

#if defined(__mips_isa_rev) && __mips_isa_rev > 2
// LDu expands into 3 instructions for 64-bit FPU, so index_reg cannot be updated here.
.macro LOAD_DOUBLE_TO_REG reg1, reg2, next_arg, index_reg, tmp, label
    .set reorder                      # force use of the branch delay slot
    LDu    $\reg1, $\reg2, -8, $\next_arg, $\tmp  # next_arg points to argument after the current one
                                                  # (offset is 8)
    b      \label
    .set noreorder
    .balign 16
.endm
#else
// LDu expands into 2 instructions for 32-bit FPU, so index_reg is updated here.
.macro LOAD_DOUBLE_TO_REG reg1, reg2, next_arg, index_reg, tmp, label
    LDu    $\reg1, $\reg2, -8, $\next_arg, $\tmp  # next_arg points to argument after the current one
                                                  # (offset is 8)
    b      \label
    addiu  $\index_reg, 16
    .balign 16
.endm
#endif

.macro LOAD_END index_reg, next_index, label
    b      \label
    li     $\index_reg, \next_index
    .balign 16
.endm
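
    /*
     * Commentary (not original to this file) on how these "tables" are consumed by
     * art_quick_invoke_stub / art_quick_invoke_static_stub below: every entry is padded
     * to exactly 16 bytes, so a register index that advances by 16 per consumed GPR/FPR
     * doubles as a byte offset into the table. The dispatch in the stubs is simply:
     *
     *   addu   $ra, $t2, $t6    # $t2 = table base (e.g. tabInt), $t6 = gpr_index
     *   jalr   $zero, $ra       # jump to the entry that fills the next register
     *   addiu  $t8, 4           # advance the argument pointer in the delay slot
     */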

#define SPILL_SIZE 32

    /*
     * Invocation stub for quick code.
     * On entry:
     *   a0 = method pointer
     *   a1 = argument array or null for no argument methods
     *   a2 = size of argument array in bytes
     *   a3 = (managed) thread pointer
     *   [sp + 16] = JValue* result
     *   [sp + 20] = shorty
     */
ENTRY art_quick_invoke_stub
    sw     $a0, 0($sp)                # save out a0
    addiu  $sp, $sp, -SPILL_SIZE      # spill s0, s1, fp, ra and gp
    .cfi_adjust_cfa_offset SPILL_SIZE
    sw     $gp, 16($sp)
    sw     $ra, 12($sp)
    .cfi_rel_offset 31, 12
    sw     $fp, 8($sp)
    .cfi_rel_offset 30, 8
    sw     $s1, 4($sp)
    .cfi_rel_offset 17, 4
    sw     $s0, 0($sp)
    .cfi_rel_offset 16, 0
    move   $fp, $sp                   # save sp in fp
    .cfi_def_cfa_register 30
    move   $s1, $a3                   # move managed thread pointer into s1
    addiu  $t0, $a2, 4                # create space for ArtMethod* in frame.
    subu   $t0, $sp, $t0              # reserve & align *stack* to 16 bytes:
    srl    $t0, $t0, 4                # native calling convention only aligns to 8B,
    sll    $sp, $t0, 4                # so we have to ensure ART 16B alignment ourselves.
    addiu  $a0, $sp, 4                # pass stack pointer + ArtMethod* as dest for memcpy
    la     $t9, memcpy
    jalr   $t9                        # (dest, src, bytes)
    addiu  $sp, $sp, -16              # make space for argument slots for memcpy
    addiu  $sp, $sp, 16               # restore stack after memcpy
    lw     $gp, 16($fp)               # restore $gp
    lw     $a0, SPILL_SIZE($fp)       # restore ArtMethod*
    lw     $a1, 4($sp)                # a1 = this*
    addiu  $t8, $sp, 8                # t8 = pointer to the current argument (skip ArtMethod* and this*)
    li     $t6, 0                     # t6 = gpr_index = 0 (corresponds to A2; A0 and A1 are skipped)
    li     $t7, 0                     # t7 = fp_index = 0
    lw     $t9, 20 + SPILL_SIZE($fp)  # get shorty (20 is offset from the $sp on entry + SPILL_SIZE
                                      # as the $fp is SPILL_SIZE bytes below the $sp on entry)
    addiu  $t9, 1                     # t9 = shorty + 1 (skip 1 for return type)

    // Load the base addresses of tabInt ... tabDouble.
    // We will use the register indices (gpr_index, fp_index) to branch.
    // Note that the indices are scaled by 16, so they can be added to the bases directly.
#if defined(__mips_isa_rev) && __mips_isa_rev >= 6
    lapc   $t2, tabInt
    lapc   $t3, tabLong
    lapc   $t4, tabSingle
    lapc   $t5, tabDouble
#else
    bltzal $zero, tabBase             # nal
    addiu  $t2, $ra, %lo(tabInt - tabBase)
tabBase:
    addiu  $t3, $ra, %lo(tabLong - tabBase)
    addiu  $t4, $ra, %lo(tabSingle - tabBase)
    addiu  $t5, $ra, %lo(tabDouble - tabBase)
#endif

loop:
    lbu    $ra, 0($t9)                # ra = shorty[i]
    beqz   $ra, loopEnd               # finish getting args when shorty[i] == '\0'
    addiu  $t9, 1

    addiu  $ra, -'J'
    beqz   $ra, isLong                # branch if arg type char == 'J'
    addiu  $ra, 'J' - 'D'
    beqz   $ra, isDouble              # branch if arg type char == 'D'
    addiu  $ra, 'D' - 'F'
    beqz   $ra, isSingle              # branch if arg type char == 'F'

    addu   $ra, $t2, $t6
    jalr   $zero, $ra
    addiu  $t8, 4                     # next_arg = curr_arg + 4

isLong:
    addu   $ra, $t3, $t6
    jalr   $zero, $ra
    addiu  $t8, 8                     # next_arg = curr_arg + 8

isSingle:
    addu   $ra, $t4, $t7
    jalr   $zero, $ra
    addiu  $t8, 4                     # next_arg = curr_arg + 4

isDouble:
    addu   $ra, $t5, $t7
#if defined(__mips_isa_rev) && __mips_isa_rev > 2
    addiu  $t7, 16                    # fp_index += 16 didn't fit into LOAD_DOUBLE_TO_REG
#endif
    jalr   $zero, $ra
    addiu  $t8, 8                     # next_arg = curr_arg + 8

loopEnd:
    lw     $t9, ART_METHOD_QUICK_CODE_OFFSET_32($a0)  # get pointer to the code
    jalr   $t9                        # call the method
    sw     $zero, 0($sp)              # store null for ArtMethod* at bottom of frame
    move   $sp, $fp                   # restore the stack
    lw     $s0, 0($sp)
    .cfi_restore 16
    lw     $s1, 4($sp)
    .cfi_restore 17
    lw     $fp, 8($sp)
    .cfi_restore 30
    lw     $ra, 12($sp)
    .cfi_restore 31
    addiu  $sp, $sp, SPILL_SIZE
    .cfi_adjust_cfa_offset -SPILL_SIZE
    lw     $t0, 16($sp)               # get result pointer
    lw     $t1, 20($sp)               # get shorty
    lb     $t1, 0($t1)                # get result type char
    li     $t2, 'D'                   # put char 'D' into t2
    beq    $t1, $t2, 5f               # branch if result type char == 'D'
    li     $t3, 'F'                   # put char 'F' into t3
    beq    $t1, $t3, 5f               # branch if result type char == 'F'
    sw     $v0, 0($t0)                # store the result
    jalr   $zero, $ra
    sw     $v1, 4($t0)                # store the other half of the result
5:
    CHECK_ALIGNMENT $t0, $t1, 8
    sdc1   $f0, 0($t0)                # store floating point result
    jalr   $zero, $ra
    nop

    // Note that gpr_index is kept within the range of tabInt and tabLong
    // and fp_index is kept within the range of tabSingle and tabDouble.
    .balign 16
tabInt:
    LOAD_WORD_TO_REG a2, t8, t6, loop             # a2 = current argument, gpr_index += 16
    LOAD_WORD_TO_REG a3, t8, t6, loop             # a3 = current argument, gpr_index += 16
    LOAD_WORD_TO_REG t0, t8, t6, loop             # t0 = current argument, gpr_index += 16
    LOAD_WORD_TO_REG t1, t8, t6, loop             # t1 = current argument, gpr_index += 16
    LOAD_END t6, 4*16, loop                       # no more GPR args, gpr_index = 4*16
tabLong:
    LOAD_LONG_TO_REG a2, a3, t8, t6, 2*16, loop   # a2_a3 = curr_arg, gpr_index = 2*16
    LOAD_LONG_TO_REG t0, t1, t8, t6, 4*16, loop   # t0_t1 = curr_arg, gpr_index = 4*16
    LOAD_LONG_TO_REG t0, t1, t8, t6, 4*16, loop   # t0_t1 = curr_arg, gpr_index = 4*16
    LOAD_END t6, 4*16, loop                       # no more GPR args, gpr_index = 4*16
    LOAD_END t6, 4*16, loop                       # no more GPR args, gpr_index = 4*16
tabSingle:
    LOAD_FLOAT_TO_REG f8, t8, t7, loop            # f8 = curr_arg, fp_index += 16
    LOAD_FLOAT_TO_REG f10, t8, t7, loop           # f10 = curr_arg, fp_index += 16
    LOAD_FLOAT_TO_REG f12, t8, t7, loop           # f12 = curr_arg, fp_index += 16
    LOAD_FLOAT_TO_REG f14, t8, t7, loop           # f14 = curr_arg, fp_index += 16
    LOAD_FLOAT_TO_REG f16, t8, t7, loop           # f16 = curr_arg, fp_index += 16
    LOAD_FLOAT_TO_REG f18, t8, t7, loop           # f18 = curr_arg, fp_index += 16
    LOAD_END t7, 6*16, loop                       # no more FPR args, fp_index = 6*16
tabDouble:
    LOAD_DOUBLE_TO_REG f8, f9, t8, t7, ra, loop   # f8_f9 = curr_arg; if FPU32, fp_index += 16
    LOAD_DOUBLE_TO_REG f10, f11, t8, t7, ra, loop # f10_f11 = curr_arg; if FPU32, fp_index += 16
    LOAD_DOUBLE_TO_REG f12, f13, t8, t7, ra, loop # f12_f13 = curr_arg; if FPU32, fp_index += 16
    LOAD_DOUBLE_TO_REG f14, f15, t8, t7, ra, loop # f14_f15 = curr_arg; if FPU32, fp_index += 16
    LOAD_DOUBLE_TO_REG f16, f17, t8, t7, ra, loop # f16_f17 = curr_arg; if FPU32, fp_index += 16
    LOAD_DOUBLE_TO_REG f18, f19, t8, t7, ra, loop # f18_f19 = curr_arg; if FPU32, fp_index += 16
    LOAD_END t7, 6*16, loop                       # no more FPR args, fp_index = 6*16
END art_quick_invoke_stub

    /*
     * Invocation static stub for quick code.
     * On entry:
     *   a0 = method pointer
     *   a1 = argument array or null for no argument methods
     *   a2 = size of argument array in bytes
     *   a3 = (managed) thread pointer
     *   [sp + 16] = JValue* result
     *   [sp + 20] = shorty
     */
ENTRY art_quick_invoke_static_stub
    sw     $a0, 0($sp)                # save out a0
    addiu  $sp, $sp, -SPILL_SIZE      # spill s0, s1, fp, ra and gp
    .cfi_adjust_cfa_offset SPILL_SIZE
    sw     $gp, 16($sp)
    sw     $ra, 12($sp)
    .cfi_rel_offset 31, 12
    sw     $fp, 8($sp)
    .cfi_rel_offset 30, 8
    sw     $s1, 4($sp)
    .cfi_rel_offset 17, 4
    sw     $s0, 0($sp)
    .cfi_rel_offset 16, 0
    move   $fp, $sp                   # save sp in fp
    .cfi_def_cfa_register 30
    move   $s1, $a3                   # move managed thread pointer into s1
    addiu  $t0, $a2, 4                # create space for ArtMethod* in frame.
    subu   $t0, $sp, $t0              # reserve & align *stack* to 16 bytes:
    srl    $t0, $t0, 4                # native calling convention only aligns to 8B,
    sll    $sp, $t0, 4                # so we have to ensure ART 16B alignment ourselves.
    addiu  $a0, $sp, 4                # pass stack pointer + ArtMethod* as dest for memcpy
    la     $t9, memcpy
    jalr   $t9                        # (dest, src, bytes)
    addiu  $sp, $sp, -16              # make space for argument slots for memcpy
    addiu  $sp, $sp, 16               # restore stack after memcpy
    lw     $gp, 16($fp)               # restore $gp
    lw     $a0, SPILL_SIZE($fp)       # restore ArtMethod*
    addiu  $t8, $sp, 4                # t8 = pointer to the current argument (skip ArtMethod*)
    li     $t6, 0                     # t6 = gpr_index = 0 (corresponds to A1; A0 is skipped)
    li     $t7, 0                     # t7 = fp_index = 0
    lw     $t9, 20 + SPILL_SIZE($fp)  # get shorty (20 is offset from the $sp on entry + SPILL_SIZE
                                      # as the $fp is SPILL_SIZE bytes below the $sp on entry)
    addiu  $t9, 1                     # t9 = shorty + 1 (skip 1 for return type)

    // Load the base addresses of tabIntS ... tabDoubleS.
    // We will use the register indices (gpr_index, fp_index) to branch.
    // Note that the indices are scaled by 16, so they can be added to the bases directly.
#if defined(__mips_isa_rev) && __mips_isa_rev >= 6
    lapc   $t2, tabIntS
    lapc   $t3, tabLongS
    lapc   $t4, tabSingleS
    lapc   $t5, tabDoubleS
#else
    bltzal $zero, tabBaseS            # nal
    addiu  $t2, $ra, %lo(tabIntS - tabBaseS)
tabBaseS:
    addiu  $t3, $ra, %lo(tabLongS - tabBaseS)
    addiu  $t4, $ra, %lo(tabSingleS - tabBaseS)
    addiu  $t5, $ra, %lo(tabDoubleS - tabBaseS)
#endif

loopS:
    lbu    $ra, 0($t9)                # ra = shorty[i]
    beqz   $ra, loopEndS              # finish getting args when shorty[i] == '\0'
    addiu  $t9, 1

    addiu  $ra, -'J'
    beqz   $ra, isLongS               # branch if arg type char == 'J'
    addiu  $ra, 'J' - 'D'
    beqz   $ra, isDoubleS             # branch if arg type char == 'D'
    addiu  $ra, 'D' - 'F'
    beqz   $ra, isSingleS             # branch if arg type char == 'F'
Goran Jakovljevicff734982015-08-24 12:58:55 +00001191
Alexey Frunze1b8464d2016-11-12 17:22:05 -08001192 addu $ra, $t2, $t6
1193 jalr $zero, $ra
1194 addiu $t8, 4 # next_arg = curr_arg + 4
Goran Jakovljevicff734982015-08-24 12:58:55 +00001195
1196isLongS:
Alexey Frunze1b8464d2016-11-12 17:22:05 -08001197 addu $ra, $t3, $t6
1198 jalr $zero, $ra
1199 addiu $t8, 8 # next_arg = curr_arg + 8
Goran Jakovljevicff734982015-08-24 12:58:55 +00001200
1201isSingleS:
Alexey Frunze1b8464d2016-11-12 17:22:05 -08001202 addu $ra, $t4, $t7
1203 jalr $zero, $ra
1204 addiu $t8, 4 # next_arg = curr_arg + 4
1205
1206isDoubleS:
1207 addu $ra, $t5, $t7
1208#if defined(__mips_isa_rev) && __mips_isa_rev > 2
 1209 addiu $t7, 16 # fp_index += 16 (this increment didn't fit into LOAD_DOUBLE_TO_REG)
1210#endif
1211 jalr $zero, $ra
1212 addiu $t8, 8 # next_arg = curr_arg + 8
Goran Jakovljevicff734982015-08-24 12:58:55 +00001213
1214loopEndS:
1215 lw $t9, ART_METHOD_QUICK_CODE_OFFSET_32($a0) # get pointer to the code
1216 jalr $t9 # call the method
1217 sw $zero, 0($sp) # store null for ArtMethod* at bottom of frame
1218 move $sp, $fp # restore the stack
1219 lw $s0, 0($sp)
1220 .cfi_restore 16
1221 lw $s1, 4($sp)
1222 .cfi_restore 17
1223 lw $fp, 8($sp)
1224 .cfi_restore 30
1225 lw $ra, 12($sp)
1226 .cfi_restore 31
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001227 addiu $sp, $sp, SPILL_SIZE
1228 .cfi_adjust_cfa_offset -SPILL_SIZE
Goran Jakovljevicff734982015-08-24 12:58:55 +00001229 lw $t0, 16($sp) # get result pointer
1230 lw $t1, 20($sp) # get shorty
1231 lb $t1, 0($t1) # get result type char
1232 li $t2, 'D' # put char 'D' into t2
1233 beq $t1, $t2, 6f # branch if result type char == 'D'
1234 li $t3, 'F' # put char 'F' into t3
1235 beq $t1, $t3, 6f # branch if result type char == 'F'
1236 sw $v0, 0($t0) # store the result
1237 jalr $zero, $ra
1238 sw $v1, 4($t0) # store the other half of the result
12396:
Chris Larsen715f43e2017-10-23 11:00:32 -07001240 CHECK_ALIGNMENT $t0, $t1, 8
1241 sdc1 $f0, 0($t0) # store floating point result
Goran Jakovljevicff734982015-08-24 12:58:55 +00001242 jalr $zero, $ra
1243 nop
Alexey Frunze1b8464d2016-11-12 17:22:05 -08001244
1245 // Note that gpr_index is kept within the range of tabIntS and tabLongS
1246 // and fp_index is kept within the range of tabSingleS and tabDoubleS.
1247 .balign 16
1248tabIntS:
1249 LOAD_WORD_TO_REG a1, t8, t6, loopS # a1 = current argument, gpr_index += 16
1250 LOAD_WORD_TO_REG a2, t8, t6, loopS # a2 = current argument, gpr_index += 16
1251 LOAD_WORD_TO_REG a3, t8, t6, loopS # a3 = current argument, gpr_index += 16
1252 LOAD_WORD_TO_REG t0, t8, t6, loopS # t0 = current argument, gpr_index += 16
1253 LOAD_WORD_TO_REG t1, t8, t6, loopS # t1 = current argument, gpr_index += 16
1254 LOAD_END t6, 5*16, loopS # no more GPR args, gpr_index = 5*16
1255tabLongS:
1256 LOAD_LONG_TO_REG a2, a3, t8, t6, 3*16, loopS # a2_a3 = curr_arg, gpr_index = 3*16
1257 LOAD_LONG_TO_REG a2, a3, t8, t6, 3*16, loopS # a2_a3 = curr_arg, gpr_index = 3*16
1258 LOAD_LONG_TO_REG t0, t1, t8, t6, 5*16, loopS # t0_t1 = curr_arg, gpr_index = 5*16
1259 LOAD_LONG_TO_REG t0, t1, t8, t6, 5*16, loopS # t0_t1 = curr_arg, gpr_index = 5*16
1260 LOAD_END t6, 5*16, loopS # no more GPR args, gpr_index = 5*16
1261 LOAD_END t6, 5*16, loopS # no more GPR args, gpr_index = 5*16
1262tabSingleS:
1263 LOAD_FLOAT_TO_REG f8, t8, t7, loopS # f8 = curr_arg, fp_index += 16
1264 LOAD_FLOAT_TO_REG f10, t8, t7, loopS # f10 = curr_arg, fp_index += 16
1265 LOAD_FLOAT_TO_REG f12, t8, t7, loopS # f12 = curr_arg, fp_index += 16
1266 LOAD_FLOAT_TO_REG f14, t8, t7, loopS # f14 = curr_arg, fp_index += 16
1267 LOAD_FLOAT_TO_REG f16, t8, t7, loopS # f16 = curr_arg, fp_index += 16
1268 LOAD_FLOAT_TO_REG f18, t8, t7, loopS # f18 = curr_arg, fp_index += 16
1269 LOAD_END t7, 6*16, loopS # no more FPR args, fp_index = 6*16
1270tabDoubleS:
1271 LOAD_DOUBLE_TO_REG f8, f9, t8, t7, ra, loopS # f8_f9 = curr_arg; if FPU32, fp_index += 16
1272 LOAD_DOUBLE_TO_REG f10, f11, t8, t7, ra, loopS # f10_f11 = curr_arg; if FPU32, fp_index += 16
1273 LOAD_DOUBLE_TO_REG f12, f13, t8, t7, ra, loopS # f12_f13 = curr_arg; if FPU32, fp_index += 16
1274 LOAD_DOUBLE_TO_REG f14, f15, t8, t7, ra, loopS # f14_f15 = curr_arg; if FPU32, fp_index += 16
1275 LOAD_DOUBLE_TO_REG f16, f17, t8, t7, ra, loopS # f16_f17 = curr_arg; if FPU32, fp_index += 16
1276 LOAD_DOUBLE_TO_REG f18, f19, t8, t7, ra, loopS # f18_f19 = curr_arg; if FPU32, fp_index += 16
1277 LOAD_END t7, 6*16, loopS # no more FPR args, fp_index = 6*16
Goran Jakovljevicff734982015-08-24 12:58:55 +00001278END art_quick_invoke_static_stub
1279
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001280#undef SPILL_SIZE
1281
Goran Jakovljevicff734982015-08-24 12:58:55 +00001282 /*
buzbee5bc5a7b2012-03-07 15:52:59 -08001283 * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
1284 * failure.
1285 */
Jeff Haod4c3f7d2013-02-14 14:14:44 -08001286 .extern artHandleFillArrayDataFromCode
Ian Rogers468532e2013-08-05 10:56:33 -07001287ENTRY art_quick_handle_fill_data
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001288 lw $a2, 0($sp) # pass referrer's Method*
1289 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case exception allocation triggers GC
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001290 la $t9, artHandleFillArrayDataFromCode
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001291 jalr $t9 # (payload offset, Array*, method, Thread*)
1292 move $a3, rSELF # pass Thread::Current
jeffhaofc6a30e2012-10-18 18:24:15 -07001293 RETURN_IF_ZERO
Ian Rogers468532e2013-08-05 10:56:33 -07001294END art_quick_handle_fill_data
buzbee5bc5a7b2012-03-07 15:52:59 -08001295
buzbee5bc5a7b2012-03-07 15:52:59 -08001296 /*
Ian Rogers57b86d42012-03-27 16:05:41 -07001297 * Entry from managed code that calls artLockObjectFromCode, may block for GC.
buzbee5bc5a7b2012-03-07 15:52:59 -08001298 */
Jeff Haod4c3f7d2013-02-14 14:14:44 -08001299 .extern artLockObjectFromCode
Ian Rogers468532e2013-08-05 10:56:33 -07001300ENTRY art_quick_lock_object
Vladimir Marko804b03f2016-09-14 16:26:36 +01001301 beqz $a0, art_quick_throw_null_pointer_exception
Alexey Frunzead63fe52017-05-08 22:10:00 -07001302 li $t8, LOCK_WORD_THIN_LOCK_COUNT_ONE
1303 li $t3, LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED
1304.Lretry_lock:
1305 lw $t0, THREAD_ID_OFFSET(rSELF) # TODO: Can the thread ID really change during the loop?
1306 ll $t1, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1307 and $t2, $t1, $t3 # zero the gc bits
1308 bnez $t2, .Lnot_unlocked # already thin locked
1309 # Unlocked case - $t1: original lock word that's zero except for the read barrier bits.
1310 or $t2, $t1, $t0 # $t2 holds thread id with count of 0 with preserved read barrier bits
1311 sc $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1312 beqz $t2, .Lretry_lock # store failed, retry
Ian Rogersa9a82542013-10-04 11:17:26 -07001313 nop
Alexey Frunzead63fe52017-05-08 22:10:00 -07001314 jalr $zero, $ra
1315 sync # full (LoadLoad|LoadStore) memory barrier
1316.Lnot_unlocked:
1317 # $t1: original lock word, $t0: thread_id with count of 0 and zero read barrier bits
1318 srl $t2, $t1, LOCK_WORD_STATE_SHIFT
 1319 bnez $t2, .Lslow_lock # if either of the top two bits is set, go to the slow path
1320 xor $t2, $t1, $t0 # lock_word.ThreadId() ^ self->ThreadId()
1321 andi $t2, $t2, 0xFFFF # zero top 16 bits
 1322 bnez $t2, .Lslow_lock # branch if lock word and self thread ids differ -> contention,
 1323 # go to the slow path; otherwise this is a recursive lock, fall through
1324 and $t2, $t1, $t3 # zero the gc bits
1325 addu $t2, $t2, $t8 # increment count in lock word
1326 srl $t2, $t2, LOCK_WORD_STATE_SHIFT # if the first gc state bit is set, we overflowed.
1327 bnez $t2, .Lslow_lock # if we overflow the count go slow path
1328 addu $t2, $t1, $t8 # increment count for real
1329 sc $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1330 beqz $t2, .Lretry_lock # store failed, retry
1331 nop
1332 jalr $zero, $ra
1333 nop
1334.Lslow_lock:
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001335 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case we block
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001336 la $t9, artLockObjectFromCode
1337 jalr $t9 # (Object* obj, Thread*)
jeffhao7fbee072012-08-24 17:56:54 -07001338 move $a1, rSELF # pass Thread::Current
Ian Rogers6bcd1632013-10-08 18:50:47 -07001339 RETURN_IF_ZERO
Ian Rogers468532e2013-08-05 10:56:33 -07001340END art_quick_lock_object
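// Illustrative sketch, not part of the build: the thin-lock fast path above behaves roughly like
// the pseudocode below (constant and field names are approximations of the real ones):
//
//   retry:
//     lw = LL(&obj->monitor_);
//     if ((lw & ~kGCStateMask) == 0) {                                 // currently unlocked
//       if (!SC(&obj->monitor_, lw | self->thin_lock_id_)) goto retry;
//       sync; return;                                                  // acquire barrier
//     }
//     if ((lw >> kStateShift) != 0) goto slow;                         // fat lock/hash/forwarding
//     if (((lw ^ self->thin_lock_id_) & 0xFFFF) != 0) goto slow;       // owned by another thread
//     if ((((lw & ~kGCStateMask) + kThinLockCountOne) >> kStateShift) != 0) goto slow;  // overflow
//     if (!SC(&obj->monitor_, lw + kThinLockCountOne)) goto retry;     // recursive lock
//     return;
//   slow:
//     artLockObjectFromCode(obj, self);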
buzbee5bc5a7b2012-03-07 15:52:59 -08001341
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001342ENTRY art_quick_lock_object_no_inline
Vladimir Marko804b03f2016-09-14 16:26:36 +01001343 beqz $a0, art_quick_throw_null_pointer_exception
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001344 nop
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001345 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case we block
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001346 la $t9, artLockObjectFromCode
1347 jalr $t9 # (Object* obj, Thread*)
1348 move $a1, rSELF # pass Thread::Current
1349 RETURN_IF_ZERO
1350END art_quick_lock_object_no_inline
1351
buzbee5bc5a7b2012-03-07 15:52:59 -08001352 /*
1353 * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
1354 */
Jeff Haod4c3f7d2013-02-14 14:14:44 -08001355 .extern artUnlockObjectFromCode
Ian Rogers468532e2013-08-05 10:56:33 -07001356ENTRY art_quick_unlock_object
Vladimir Marko804b03f2016-09-14 16:26:36 +01001357 beqz $a0, art_quick_throw_null_pointer_exception
Alexey Frunzead63fe52017-05-08 22:10:00 -07001358 li $t8, LOCK_WORD_THIN_LOCK_COUNT_ONE
1359 li $t3, LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED
1360.Lretry_unlock:
1361#ifndef USE_READ_BARRIER
1362 lw $t1, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1363#else
1364 ll $t1, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0) # Need to use atomic read-modify-write for read barrier
1365#endif
1366 srl $t2, $t1, LOCK_WORD_STATE_SHIFT
 1367 bnez $t2, .Lslow_unlock # if either of the top two bits is set, go to the slow path
1368 lw $t0, THREAD_ID_OFFSET(rSELF)
1369 and $t2, $t1, $t3 # zero the gc bits
1370 xor $t2, $t2, $t0 # lock_word.ThreadId() ^ self->ThreadId()
1371 andi $t2, $t2, 0xFFFF # zero top 16 bits
 1372 bnez $t2, .Lslow_unlock # branch if lock word and self thread ids do not match
1373 and $t2, $t1, $t3 # zero the gc bits
1374 bgeu $t2, $t8, .Lrecursive_thin_unlock
1375 # transition to unlocked
1376 nor $t2, $zero, $t3 # $t2 = LOCK_WORD_GC_STATE_MASK_SHIFTED
1377 and $t2, $t1, $t2 # $t2: zero except for the preserved gc bits
1378 sync # full (LoadStore|StoreStore) memory barrier
1379#ifndef USE_READ_BARRIER
1380 jalr $zero, $ra
1381 sw $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1382#else
1383 sc $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1384 beqz $t2, .Lretry_unlock # store failed, retry
Ian Rogersa9a82542013-10-04 11:17:26 -07001385 nop
Alexey Frunzead63fe52017-05-08 22:10:00 -07001386 jalr $zero, $ra
1387 nop
1388#endif
1389.Lrecursive_thin_unlock:
1390 # t1: original lock word
1391 subu $t2, $t1, $t8 # decrement count
1392#ifndef USE_READ_BARRIER
1393 jalr $zero, $ra
1394 sw $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1395#else
1396 sc $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
1397 beqz $t2, .Lretry_unlock # store failed, retry
1398 nop
1399 jalr $zero, $ra
1400 nop
1401#endif
1402.Lslow_unlock:
1403 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case exception allocation triggers GC
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001404 la $t9, artUnlockObjectFromCode
Alexey Frunzead63fe52017-05-08 22:10:00 -07001405 jalr $t9 # (Object* obj, Thread*)
1406 move $a1, rSELF # pass Thread::Current
buzbee5bc5a7b2012-03-07 15:52:59 -08001407 RETURN_IF_ZERO
Ian Rogers468532e2013-08-05 10:56:33 -07001408END art_quick_unlock_object
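// Illustrative sketch, not part of the build: the thin-lock unlock fast path above is roughly
// (constant and field names approximate):
//
//   lw = load(&obj->monitor_);                                          // LL under read barriers
//   if ((lw >> kStateShift) != 0) goto slow;                            // not a thin lock
//   if ((((lw & ~kGCStateMask) ^ self->thin_lock_id_) & 0xFFFF) != 0) goto slow;  // not the owner
//   if ((lw & ~kGCStateMask) >= kThinLockCountOne) store(&obj->monitor_, lw - kThinLockCountOne);
//   else { sync; store(&obj->monitor_, lw & kGCStateMask); }            // release, drop to unlocked
//
// where the stores become SC retries when USE_READ_BARRIER is defined.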
buzbee5bc5a7b2012-03-07 15:52:59 -08001409
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001410ENTRY art_quick_unlock_object_no_inline
Vladimir Marko804b03f2016-09-14 16:26:36 +01001411 beqz $a0, art_quick_throw_null_pointer_exception
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001412 nop
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001413 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case exception allocation triggers GC
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001414 la $t9, artUnlockObjectFromCode
1415 jalr $t9 # (Object* obj, Thread*)
1416 move $a1, rSELF # pass Thread::Current
1417 RETURN_IF_ZERO
1418END art_quick_unlock_object_no_inline
1419
buzbee5bc5a7b2012-03-07 15:52:59 -08001420 /*
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08001421 * Entry from managed code that calls artInstanceOfFromCode and delivers exception on failure.
buzbee5bc5a7b2012-03-07 15:52:59 -08001422 */
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08001423 .extern artInstanceOfFromCode
1424 .extern artThrowClassCastExceptionForObject
1425ENTRY art_quick_check_instance_of
Vladimir Marko175e7862018-03-27 09:03:13 +00001426 // Type check using the bit string passes null as the target class. In that case just throw.
1427 beqz $a1, .Lthrow_class_cast_exception_for_bitstring_check
1428 nop
1429
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001430 addiu $sp, $sp, -32
1431 .cfi_adjust_cfa_offset 32
1432 sw $gp, 16($sp)
Ian Rogersa9a82542013-10-04 11:17:26 -07001433 sw $ra, 12($sp)
1434 .cfi_rel_offset 31, 12
1435 sw $t9, 8($sp)
1436 sw $a1, 4($sp)
1437 sw $a0, 0($sp)
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08001438 la $t9, artInstanceOfFromCode
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001439 jalr $t9
Douglas Leung735b8552014-10-31 12:21:40 -07001440 addiu $sp, $sp, -16 # reserve argument slots on the stack
1441 addiu $sp, $sp, 16
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001442 lw $gp, 16($sp)
Ian Rogers86bcdc22014-02-21 22:06:38 -08001443 beqz $v0, .Lthrow_class_cast_exception
Ian Rogersa9a82542013-10-04 11:17:26 -07001444 lw $ra, 12($sp)
Andreas Gampe8d365912015-01-13 11:32:32 -08001445 jalr $zero, $ra
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001446 addiu $sp, $sp, 32
1447 .cfi_adjust_cfa_offset -32
Vladimir Marko175e7862018-03-27 09:03:13 +00001448
Ian Rogers86bcdc22014-02-21 22:06:38 -08001449.Lthrow_class_cast_exception:
Ian Rogersa9a82542013-10-04 11:17:26 -07001450 lw $t9, 8($sp)
1451 lw $a1, 4($sp)
1452 lw $a0, 0($sp)
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001453 addiu $sp, $sp, 32
1454 .cfi_adjust_cfa_offset -32
Vladimir Marko175e7862018-03-27 09:03:13 +00001455
1456.Lthrow_class_cast_exception_for_bitstring_check:
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001457 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08001458 la $t9, artThrowClassCastExceptionForObject
1459 jalr $zero, $t9 # artThrowClassCastException (Object*, Class*, Thread*)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001460 move $a2, rSELF # pass Thread::Current
Mathieu Chartierb99f4d62016-11-07 16:17:26 -08001461END art_quick_check_instance_of
buzbee5bc5a7b2012-03-07 15:52:59 -08001462
buzbee5bc5a7b2012-03-07 15:52:59 -08001463 /*
Man Cao1aee9002015-07-14 22:31:42 -07001464 * Restore rReg's value from offset($sp) if rReg is not the same as rExclude.
1465 * nReg is the register number for rReg.
1466 */
1467.macro POP_REG_NE rReg, nReg, offset, rExclude
1468 .ifnc \rReg, \rExclude
1469 lw \rReg, \offset($sp) # restore rReg
1470 .cfi_restore \nReg
1471 .endif
1472.endm
1473
1474 /*
1475 * Macro to insert read barrier, only used in art_quick_aput_obj.
1476 * rObj and rDest are registers, offset is a defined literal such as MIRROR_OBJECT_CLASS_OFFSET.
1477 * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
1478 */
1479.macro READ_BARRIER rDest, rObj, offset
1480#ifdef USE_READ_BARRIER
1481 # saved registers used in art_quick_aput_obj: a0-a2, t0-t1, t9, ra. 8 words for 16B alignment.
1482 addiu $sp, $sp, -32
1483 .cfi_adjust_cfa_offset 32
1484 sw $ra, 28($sp)
1485 .cfi_rel_offset 31, 28
1486 sw $t9, 24($sp)
1487 .cfi_rel_offset 25, 24
1488 sw $t1, 20($sp)
1489 .cfi_rel_offset 9, 20
1490 sw $t0, 16($sp)
1491 .cfi_rel_offset 8, 16
1492 sw $a2, 8($sp) # padding slot at offset 12 (padding can be any slot in the 32B)
1493 .cfi_rel_offset 6, 8
1494 sw $a1, 4($sp)
1495 .cfi_rel_offset 5, 4
1496 sw $a0, 0($sp)
1497 .cfi_rel_offset 4, 0
1498
Man Cao63069212015-08-21 15:51:39 -07001499 # move $a0, \rRef # pass ref in a0 (no-op for now since parameter ref is unused)
Man Cao1aee9002015-07-14 22:31:42 -07001500 .ifnc \rObj, $a1
1501 move $a1, \rObj # pass rObj
1502 .endif
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001503 addiu $a2, $zero, \offset # pass offset
1504 la $t9, artReadBarrierSlow
1505 jalr $t9 # artReadBarrierSlow(ref, rObj, offset)
Man Cao1aee9002015-07-14 22:31:42 -07001506 addiu $sp, $sp, -16 # Use branch delay slot to reserve argument slots on the stack
1507 # before the call to artReadBarrierSlow.
1508 addiu $sp, $sp, 16 # restore stack after call to artReadBarrierSlow
1509 # No need to unpoison return value in v0, artReadBarrierSlow() would do the unpoisoning.
1510 move \rDest, $v0 # save return value in rDest
1511 # (rDest cannot be v0 in art_quick_aput_obj)
1512
1513 lw $a0, 0($sp) # restore registers except rDest
1514 # (rDest can only be t0 or t1 in art_quick_aput_obj)
1515 .cfi_restore 4
1516 lw $a1, 4($sp)
1517 .cfi_restore 5
1518 lw $a2, 8($sp)
1519 .cfi_restore 6
1520 POP_REG_NE $t0, 8, 16, \rDest
1521 POP_REG_NE $t1, 9, 20, \rDest
1522 lw $t9, 24($sp)
1523 .cfi_restore 25
1524 lw $ra, 28($sp) # restore $ra
1525 .cfi_restore 31
1526 addiu $sp, $sp, 32
1527 .cfi_adjust_cfa_offset -32
1528#else
1529 lw \rDest, \offset(\rObj)
1530 UNPOISON_HEAP_REF \rDest
1531#endif // USE_READ_BARRIER
1532.endm
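// In C-like terms (illustrative, hedged): with USE_READ_BARRIER defined the macro above performs
//   rDest = artReadBarrierSlow(/* ref, currently unused */, rObj, offset);
// i.e. the reference field at rObj + offset is read through the runtime, which can return the
// to-space copy under a concurrent-copying collector; without read barriers it is a plain
// lw + UNPOISON_HEAP_REF.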
1533
Man Cao1aee9002015-07-14 22:31:42 -07001534#ifdef USE_READ_BARRIER
1535 .extern artReadBarrierSlow
1536#endif
Ian Rogersa9a82542013-10-04 11:17:26 -07001537ENTRY art_quick_aput_obj
Ian Rogers86bcdc22014-02-21 22:06:38 -08001538 beqz $a2, .Ldo_aput_null
Ian Rogersa9a82542013-10-04 11:17:26 -07001539 nop
Man Cao1aee9002015-07-14 22:31:42 -07001540 READ_BARRIER $t0, $a0, MIRROR_OBJECT_CLASS_OFFSET
1541 READ_BARRIER $t1, $a2, MIRROR_OBJECT_CLASS_OFFSET
1542 READ_BARRIER $t0, $t0, MIRROR_CLASS_COMPONENT_TYPE_OFFSET
Ian Rogers86bcdc22014-02-21 22:06:38 -08001543 bne $t1, $t0, .Lcheck_assignability # branch if value's type != array's component type; equal types are trivially assignable
Ian Rogersa9a82542013-10-04 11:17:26 -07001544 nop
Ian Rogers86bcdc22014-02-21 22:06:38 -08001545.Ldo_aput:
Ian Rogersa9a82542013-10-04 11:17:26 -07001546 sll $a1, $a1, 2
1547 add $t0, $a0, $a1
Hiroshi Yamauchibfa5eb62015-05-29 15:04:41 -07001548 POISON_HEAP_REF $a2
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001549 sw $a2, MIRROR_OBJECT_ARRAY_DATA_OFFSET($t0)
Ian Rogersa9a82542013-10-04 11:17:26 -07001550 lw $t0, THREAD_CARD_TABLE_OFFSET(rSELF)
Mathieu Chartierafdcbcb2017-04-26 16:43:35 -07001551 srl $t1, $a0, CARD_TABLE_CARD_SHIFT
Ian Rogersa9a82542013-10-04 11:17:26 -07001552 add $t1, $t1, $t0
1553 sb $t0, ($t1)
Andreas Gampe8d365912015-01-13 11:32:32 -08001554 jalr $zero, $ra
Ian Rogersa9a82542013-10-04 11:17:26 -07001555 nop
Ian Rogers86bcdc22014-02-21 22:06:38 -08001556.Ldo_aput_null:
Ian Rogersa9a82542013-10-04 11:17:26 -07001557 sll $a1, $a1, 2
1558 add $t0, $a0, $a1
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001559 sw $a2, MIRROR_OBJECT_ARRAY_DATA_OFFSET($t0)
Andreas Gampe8d365912015-01-13 11:32:32 -08001560 jalr $zero, $ra
Ian Rogersa9a82542013-10-04 11:17:26 -07001561 nop
Ian Rogers86bcdc22014-02-21 22:06:38 -08001562.Lcheck_assignability:
Ian Rogersa9a82542013-10-04 11:17:26 -07001563 addiu $sp, $sp, -32
1564 .cfi_adjust_cfa_offset 32
1565 sw $ra, 28($sp)
1566 .cfi_rel_offset 31, 28
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001567 sw $gp, 16($sp)
Ian Rogersa9a82542013-10-04 11:17:26 -07001568 sw $t9, 12($sp)
1569 sw $a2, 8($sp)
1570 sw $a1, 4($sp)
1571 sw $a0, 0($sp)
1572 move $a1, $t1
1573 move $a0, $t0
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001574 la $t9, artIsAssignableFromCode
1575 jalr $t9 # (Class*, Class*)
1576 addiu $sp, $sp, -16 # reserve argument slots on the stack
1577 addiu $sp, $sp, 16
Ian Rogersa9a82542013-10-04 11:17:26 -07001578 lw $ra, 28($sp)
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001579 lw $gp, 16($sp)
Ian Rogersa9a82542013-10-04 11:17:26 -07001580 lw $t9, 12($sp)
1581 lw $a2, 8($sp)
1582 lw $a1, 4($sp)
1583 lw $a0, 0($sp)
Duane Sande34652f2014-11-04 11:09:36 -08001584 addiu $sp, 32
Ian Rogersa9a82542013-10-04 11:17:26 -07001585 .cfi_adjust_cfa_offset -32
Ian Rogers86bcdc22014-02-21 22:06:38 -08001586 bnez $v0, .Ldo_aput
Ian Rogersa9a82542013-10-04 11:17:26 -07001587 nop
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001588 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
Ian Rogersa9a82542013-10-04 11:17:26 -07001589 move $a1, $a2
Ian Rogersa9a82542013-10-04 11:17:26 -07001590 la $t9, artThrowArrayStoreException
Andreas Gampe8d365912015-01-13 11:32:32 -08001591 jalr $zero, $t9 # artThrowArrayStoreException(Object* array, Object* value, Thread*)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001592 move $a2, rSELF # pass Thread::Current
Ian Rogersa9a82542013-10-04 11:17:26 -07001593END art_quick_aput_obj
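// Illustrative C-like sketch, not part of the build, of art_quick_aput_obj above (argument order
// of artIsAssignableFromCode/artThrowArrayStoreException as used by this stub; helper names such
// as kCardDirty are placeholders):
//
//   void aput_obj(Array* array, int32_t index, Object* value) {
//     if (value == nullptr) { array->data[index] = nullptr; return; }     // no card mark needed
//     if (value->klass_ != array->klass_->component_type_ &&
//         !artIsAssignableFromCode(array->klass_->component_type_, value->klass_)) {
//       artThrowArrayStoreException(array, value, Thread::Current());     // does not return
//     }
//     array->data[index] = value;                                         // poisoned ref store
//     card_table[uintptr_t(array) >> CARD_TABLE_CARD_SHIFT] = kCardDirty; // dirty card for GC
//   }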
buzbee5bc5a7b2012-03-07 15:52:59 -08001594
Alexey Frunze0cb12422017-01-25 19:30:18 -08001595// Macros taking opportunity of code similarities for downcalls.
1596.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
1597 .extern \entrypoint
1598ENTRY \name
1599 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
1600 la $t9, \entrypoint
1601 jalr $t9 # (field_idx, Thread*)
1602 move $a1, rSELF # pass Thread::Current
1603 \return # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
1604END \name
1605.endm
1606
1607.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
1608 .extern \entrypoint
1609ENTRY \name
1610 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
1611 la $t9, \entrypoint
1612 jalr $t9 # (field_idx, Object*, Thread*) or
1613 # (field_idx, new_val, Thread*)
1614 move $a2, rSELF # pass Thread::Current
1615 \return # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
1616END \name
1617.endm
1618
1619.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
1620 .extern \entrypoint
1621ENTRY \name
1622 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
1623 la $t9, \entrypoint
1624 jalr $t9 # (field_idx, Object*, new_val, Thread*)
1625 move $a3, rSELF # pass Thread::Current
1626 \return # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
1627END \name
1628.endm
1629
1630.macro FOUR_ARG_REF_DOWNCALL name, entrypoint, return
1631 .extern \entrypoint
1632ENTRY \name
1633 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
1634 la $t9, \entrypoint
1635 jalr $t9 # (field_idx, Object*, 64-bit new_val, Thread*) or
1636 # (field_idx, 64-bit new_val, Thread*)
1637 # Note that a 64-bit new_val needs to be aligned with
1638 # an even-numbered register, hence A1 may be skipped
1639 # for new_val to reside in A2-A3.
1640 sw rSELF, 16($sp) # pass Thread::Current
1641 \return # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
1642END \name
1643.endm
Fred Shih37f05ef2014-07-16 18:38:08 -07001644
1645 /*
Alexey Frunze0cb12422017-01-25 19:30:18 -08001646 * Called by managed code to resolve a static/instance field and load/store a value.
Roland Levillain809f5b12018-01-04 14:05:59 +00001647 *
1648 * Note: Functions `art{Get,Set}<Kind>{Static,Instance}FromCompiledCode` are
1649 * defined with a macro in runtime/entrypoints/quick/quick_field_entrypoints.cc.
Fred Shih37f05ef2014-07-16 18:38:08 -07001650 */
Alexey Frunze0cb12422017-01-25 19:30:18 -08001651ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1652ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1653ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1654ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1655ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1656ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1657ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
1658TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1659TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1660TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1661TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1662TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1663TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1664TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
1665TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_ZERO
1666TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_ZERO
1667TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCompiledCode, RETURN_IF_ZERO
1668TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCompiledCode, RETURN_IF_ZERO
1669FOUR_ARG_REF_DOWNCALL art_quick_set64_static, artSet64StaticFromCompiledCode, RETURN_IF_ZERO
1670THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCompiledCode, RETURN_IF_ZERO
1671THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCompiledCode, RETURN_IF_ZERO
1672THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCode, RETURN_IF_ZERO
1673THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_ZERO
1674FOUR_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCompiledCode, RETURN_IF_ZERO
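// For reference (hedged sketch, not verified signatures): the C entrypoints listed above follow
// shapes along the lines of
//   extern "C" int32_t artGet32StaticFromCompiledCode(uint32_t field_idx, Thread* self);
//   extern "C" int artSet32InstanceFromCompiledCode(uint32_t field_idx, mirror::Object* obj,
//                                                   int32_t new_value, Thread* self);
// the stubs only append Thread::Current (in a register, or on the stack for the 64-bit setters)
// and return through RETURN_IF_NO_EXCEPTION / RETURN_IF_ZERO.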
buzbee5bc5a7b2012-03-07 15:52:59 -08001675
Vladimir Markoa3c38272015-04-28 12:37:09 +01001676// Macro to facilitate adding new allocation entrypoints.
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001677.macro ONE_ARG_DOWNCALL name, entrypoint, return
1678 .extern \entrypoint
1679ENTRY \name
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001680 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001681 la $t9, \entrypoint
1682 jalr $t9
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001683 move $a1, rSELF # pass Thread::Current
1684 \return
1685END \name
1686.endm
1687
Mathieu Chartiercbb2d202013-11-14 17:45:16 -08001688.macro TWO_ARG_DOWNCALL name, entrypoint, return
1689 .extern \entrypoint
1690ENTRY \name
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001691 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001692 la $t9, \entrypoint
1693 jalr $t9
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001694 move $a2, rSELF # pass Thread::Current
Mathieu Chartiercbb2d202013-11-14 17:45:16 -08001695 \return
1696END \name
1697.endm
buzbee5bc5a7b2012-03-07 15:52:59 -08001698
Mathieu Chartiercbb2d202013-11-14 17:45:16 -08001699.macro THREE_ARG_DOWNCALL name, entrypoint, return
1700 .extern \entrypoint
1701ENTRY \name
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001702 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001703 la $t9, \entrypoint
1704 jalr $t9
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001705 move $a3, rSELF # pass Thread::Current
Mathieu Chartiercbb2d202013-11-14 17:45:16 -08001706 \return
1707END \name
1708.endm
buzbee5bc5a7b2012-03-07 15:52:59 -08001709
Jeff Hao848f70a2014-01-15 13:49:50 -08001710.macro FOUR_ARG_DOWNCALL name, entrypoint, return
1711 .extern \entrypoint
1712ENTRY \name
Vladimir Markofd36f1f2016-08-03 18:49:58 +01001713 SETUP_SAVE_REFS_ONLY_FRAME # save callee saves in case of GC
Goran Jakovljevic590b1362016-03-21 14:24:43 +01001714 la $t9, \entrypoint
1715 jalr $t9
Vladimir Markoa3c38272015-04-28 12:37:09 +01001716 sw rSELF, 16($sp) # pass Thread::Current
Jeff Hao848f70a2014-01-15 13:49:50 -08001717 \return
1718END \name
1719.endm
1720
Mathieu Chartier7410f292013-11-24 13:17:35 -08001721// Generate the allocation entrypoints for each allocator.
Goran Jakovljevic854df412017-06-27 14:41:39 +02001722GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
1723// Comment out allocators that have mips specific asm.
1724// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
1725// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
1726GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
1727// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
1728// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_region_tlab, RegionTLAB)
1729// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_region_tlab, RegionTLAB)
1730// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_region_tlab, RegionTLAB)
1731// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_region_tlab, RegionTLAB)
1732GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_region_tlab, RegionTLAB)
1733GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_region_tlab, RegionTLAB)
1734GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_region_tlab, RegionTLAB)
1735
1736// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
1737// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
1738GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
1739// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
1740// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_tlab, TLAB)
1741// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_tlab, TLAB)
1742// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_tlab, TLAB)
1743// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_tlab, TLAB)
1744GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_tlab, TLAB)
1745GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_tlab, TLAB)
1746GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_tlab, TLAB)
Hiroshi Yamauchi10d4c082016-02-24 12:51:18 -08001747
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001748// A hand-written override for:
1749// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
1750// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
Goran Jakovljevic854df412017-06-27 14:41:39 +02001751.macro ART_QUICK_ALLOC_OBJECT_ROSALLOC c_name, cxx_name, isInitialized
1752ENTRY_NO_GP \c_name
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001753 # Fast path rosalloc allocation
1754 # a0: type
1755 # s1: Thread::Current
1756 # -----------------------------
1757 # t1: object size
1758 # t2: rosalloc run
1759 # t3: thread stack top offset
1760 # t4: thread stack bottom offset
1761 # v0: free list head
1762 #
1763 # t5, t6 : temps
1764 lw $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1) # Check if thread local allocation
1765 lw $t4, THREAD_LOCAL_ALLOC_STACK_END_OFFSET($s1) # stack has any room left.
1766 bgeu $t3, $t4, .Lslow_path_\c_name
Pavle Batuta712c59d2015-12-02 18:39:01 +01001767
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001768 lw $t1, MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET($a0) # Load object size (t1).
1769 li $t5, ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE # Check if size is for a thread local
1770 # allocation. Also does the
1771 # initialized and finalizable checks.
Goran Jakovljevic854df412017-06-27 14:41:39 +02001772 # When isInitialized == 0, then the class is potentially not yet initialized.
1773 # If the class is not yet initialized, the object size will be very large to force the branch
1774 # below to be taken.
1775 #
1776 # See InitializeClassVisitors in class-inl.h for more details.
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001777 bgtu $t1, $t5, .Lslow_path_\c_name
1778
1779 # Compute the rosalloc bracket index from the size. Since the size is already aligned we can
1780 # combine the two shifts together.
1781 srl $t1, $t1, (ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)
1782
1783 addu $t2, $t1, $s1
1784 lw $t2, (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)($t2) # Load rosalloc run (t2).
1785
1786 # Load the free list head (v0).
1787 # NOTE: this will be the return val.
1788 lw $v0, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
1789 beqz $v0, .Lslow_path_\c_name
1790 nop
1791
1792 # Load the next pointer of the head and update the list head with the next pointer.
1793 lw $t5, ROSALLOC_SLOT_NEXT_OFFSET($v0)
1794 sw $t5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
1795
1796 # Store the class pointer in the header. This also overwrites the first pointer. The offsets are
1797 # asserted to match.
1798
1799#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
1800#error "Class pointer needs to overwrite next pointer."
1801#endif
1802
1803 POISON_HEAP_REF $a0
1804 sw $a0, MIRROR_OBJECT_CLASS_OFFSET($v0)
1805
1806 # Push the new object onto the thread local allocation stack and increment the thread local
1807 # allocation stack top.
1808 sw $v0, 0($t3)
1809 addiu $t3, $t3, COMPRESSED_REFERENCE_SIZE
1810 sw $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1)
1811
1812 # Decrement the size of the free list.
1813 lw $t5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
1814 addiu $t5, $t5, -1
1815 sw $t5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
1816
Goran Jakovljevic854df412017-06-27 14:41:39 +02001817.if \isInitialized == 0
1818 # This barrier is only necessary when the allocation also requires a class initialization check.
1819 #
1820 # If the class is already observably initialized, then new-instance allocations are protected
1821 # from publishing by the compiler which inserts its own StoreStore barrier.
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001822 sync # Fence.
Goran Jakovljevic854df412017-06-27 14:41:39 +02001823.endif
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001824 jalr $zero, $ra
1825 nop
1826
1827 .Lslow_path_\c_name:
Goran Jakovljevic854df412017-06-27 14:41:39 +02001828 addiu $t9, $t9, (.Lslow_path_\c_name - \c_name) + 4
1829 .cpload $t9
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001830 SETUP_SAVE_REFS_ONLY_FRAME
1831 la $t9, \cxx_name
1832 jalr $t9
1833 move $a1, $s1 # Pass self as argument.
1834 RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1835END \c_name
1836.endm
1837
Goran Jakovljevic854df412017-06-27 14:41:39 +02001838ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc, /* isInitialized */ 0
1839ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc, /* isInitialized */ 1
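// Illustrative pseudocode (hedged; field names approximate) for the RosAlloc fast path above:
//   if (self->thread_local_alloc_stack_top_ >= self->thread_local_alloc_stack_end_) goto slow;
//   size = klass->object_size_alloc_fast_path_;       // "huge" if not initialized or finalizable
//   if (size > ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE) goto slow;
//   run = self->rosalloc_runs_[BracketIndexFromSize(size)];
//   obj = run->free_list_.head_;  if (obj == nullptr) goto slow;
//   run->free_list_.head_ = obj->next_;  run->free_list_.size_--;
//   obj->klass_ = klass;  *self->thread_local_alloc_stack_top_++ = obj;
//   if (!isInitialized) fence;                         // publish the class pointer
//   return obj;
// slow: artAllocObjectFromCode{Resolved,Initialized}RosAlloc(klass, self);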
Goran Jakovljevic2449e5c2017-01-19 11:39:18 +01001840
Goran Jakovljevic854df412017-06-27 14:41:39 +02001841// The common fast path code for art_quick_alloc_object_resolved/initialized_tlab
1842// and art_quick_alloc_object_resolved/initialized_region_tlab.
1843//
1844// a0: type, s1(rSELF): Thread::Current.
1845// Need to preserve a0 to the slow path.
1846//
1847// If isInitialized=1 then the compiler assumes the object's class has already been initialized.
1848// If isInitialized=0 the compiler can only assume it's been at least resolved.
1849.macro ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH slowPathLabel isInitialized
1850 lw $v0, THREAD_LOCAL_POS_OFFSET(rSELF) # Load thread_local_pos.
1851 lw $a2, THREAD_LOCAL_END_OFFSET(rSELF) # Load thread_local_end.
1852 subu $a3, $a2, $v0 # Compute the remaining buffer size.
1853 lw $t0, MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET($a0) # Load the object size.
1854
1855 # When isInitialized == 0, then the class is potentially not yet initialized.
1856 # If the class is not yet initialized, the object size will be very large to force the branch
1857 # below to be taken.
1858 #
1859 # See InitializeClassVisitors in class-inl.h for more details.
1860 bgtu $t0, $a3, \slowPathLabel # Check if it fits.
1861 addu $t1, $v0, $t0 # Add object size to tlab pos (in branch
1862 # delay slot).
1863 # "Point of no slow path". Won't go to the slow path from here on.
1864 sw $t1, THREAD_LOCAL_POS_OFFSET(rSELF) # Store new thread_local_pos.
1865 lw $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF) # Increment thread_local_objects.
1866 addiu $a2, $a2, 1
1867 sw $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF)
1868 POISON_HEAP_REF $a0
1869 sw $a0, MIRROR_OBJECT_CLASS_OFFSET($v0) # Store the class pointer.
1870
1871.if \isInitialized == 0
1872 # This barrier is only necessary when the allocation also requires a class initialization check.
1873 #
1874 # If the class is already observably initialized, then new-instance allocations are protected
1875 # from publishing by the compiler which inserts its own StoreStore barrier.
1876 sync # Fence.
1877.endif
1878 jalr $zero, $ra
1879 nop
1880.endm
1881
1882// The common code for art_quick_alloc_object_resolved/initialized_tlab
1883// and art_quick_alloc_object_resolved/initialized_region_tlab.
1884.macro GENERATE_ALLOC_OBJECT_TLAB name, entrypoint, isInitialized
1885ENTRY_NO_GP \name
1886 # Fast path tlab allocation.
1887 # a0: type, s1(rSELF): Thread::Current.
1888 ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH .Lslow_path_\name, \isInitialized
1889.Lslow_path_\name:
1890 addiu $t9, $t9, (.Lslow_path_\name - \name) + 4
1891 .cpload $t9
1892 SETUP_SAVE_REFS_ONLY_FRAME # Save callee saves in case of GC.
1893 la $t9, \entrypoint
1894 jalr $t9 # (mirror::Class*, Thread*)
1895 move $a1, rSELF # Pass Thread::Current.
1896 RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1897END \name
1898.endm
1899
1900GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_region_tlab, artAllocObjectFromCodeResolvedRegionTLAB, /* isInitialized */ 0
1901GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_region_tlab, artAllocObjectFromCodeInitializedRegionTLAB, /* isInitialized */ 1
1902GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_tlab, artAllocObjectFromCodeResolvedTLAB, /* isInitialized */ 0
1903GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_tlab, artAllocObjectFromCodeInitializedTLAB, /* isInitialized */ 1
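// Illustrative pseudocode (hedged) for the TLAB bump-pointer fast path above:
//   pos  = self->thread_local_pos_;  end = self->thread_local_end_;
//   size = klass->object_size_alloc_fast_path_;   // "huge" when the class is not ready -> slow path
//   if (size > end - pos) goto slow;
//   self->thread_local_pos_ = pos + size;  self->thread_local_objects_++;
//   ((Object*)pos)->klass_ = klass;
//   if (!isInitialized) fence;                    // publish the class pointer
//   return (Object*)pos;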
1904
1905// The common fast path code for art_quick_alloc_array_resolved/initialized_tlab
1906// and art_quick_alloc_array_resolved/initialized_region_tlab.
1907//
1908// a0: type, a1: component_count, a2: total_size, s1(rSELF): Thread::Current.
1909// Need to preserve a0 and a1 to the slow path.
1910.macro ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE slowPathLabel
 1911 li $a3, OBJECT_ALIGNMENT_MASK_TOGGLED # Apply alignment mask
1912 and $a2, $a2, $a3 # (addr + 7) & ~7.
1913
1914 lw $v0, THREAD_LOCAL_POS_OFFSET(rSELF) # Load thread_local_pos.
1915 lw $t1, THREAD_LOCAL_END_OFFSET(rSELF) # Load thread_local_end.
1916 subu $t2, $t1, $v0 # Compute the remaining buffer size.
1917 bgtu $a2, $t2, \slowPathLabel # Check if it fits.
1918 addu $a2, $v0, $a2 # Add object size to tlab pos (in branch
1919 # delay slot).
1920
1921 # "Point of no slow path". Won't go to the slow path from here on.
1922 sw $a2, THREAD_LOCAL_POS_OFFSET(rSELF) # Store new thread_local_pos.
1923 lw $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF) # Increment thread_local_objects.
1924 addiu $a2, $a2, 1
1925 sw $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF)
1926 POISON_HEAP_REF $a0
1927 sw $a0, MIRROR_OBJECT_CLASS_OFFSET($v0) # Store the class pointer.
1928 jalr $zero, $ra
1929 sw $a1, MIRROR_ARRAY_LENGTH_OFFSET($v0) # Store the array length.
1930.endm
1931
1932.macro GENERATE_ALLOC_ARRAY_TLAB name, entrypoint, size_setup
1933ENTRY_NO_GP \name
1934 # Fast path array allocation for region tlab allocation.
1935 # a0: mirror::Class* type
1936 # a1: int32_t component_count
1937 # s1(rSELF): Thread::Current
1938 \size_setup .Lslow_path_\name
1939 ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE .Lslow_path_\name
1940.Lslow_path_\name:
1941 # a0: mirror::Class* type
1942 # a1: int32_t component_count
1943 # a2: Thread* self
1944 addiu $t9, $t9, (.Lslow_path_\name - \name) + 4
1945 .cpload $t9
1946 SETUP_SAVE_REFS_ONLY_FRAME # Save callee saves in case of GC.
1947 la $t9, \entrypoint
1948 jalr $t9
1949 move $a2, rSELF # Pass Thread::Current.
1950 RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1951END \name
1952.endm
1953
1954.macro COMPUTE_ARRAY_SIZE_UNKNOWN slow_path
1955 break # We should never enter here.
1956 # Code below is for reference.
1957 # Possibly a large object, go slow.
1958 # Also does negative array size check.
1959 li $a2, ((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_WIDE_ARRAY_DATA_OFFSET) / 8)
1960 bgtu $a1, $a2, \slow_path
1961 # Array classes are never finalizable
1962 # or uninitialized, no need to check.
1963 lw $a3, MIRROR_CLASS_COMPONENT_TYPE_OFFSET($a0) # Load component type.
1964 UNPOISON_HEAP_REF $a3
1965 lw $a3, MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET($a3)
1966 srl $a3, $a3, PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT # Component size shift is in high 16 bits.
1967 sllv $a2, $a1, $a3 # Calculate data size.
1968 # Add array data offset and alignment.
1969 addiu $a2, $a2, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
1970#if MIRROR_WIDE_ARRAY_DATA_OFFSET != MIRROR_INT_ARRAY_DATA_OFFSET + 4
1971#error Long array data offset must be 4 greater than int array data offset.
1972#endif
1973
1974 addiu $a3, $a3, 1 # Add 4 to the length only if the component
1975 andi $a3, $a3, 4 # size shift is 3 (for 64 bit alignment).
1976 addu $a2, $a2, $a3
1977.endm
1978
1979.macro COMPUTE_ARRAY_SIZE_8 slow_path
1980 # Possibly a large object, go slow.
1981 # Also does negative array size check.
1982 li $a2, (MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET)
1983 bgtu $a1, $a2, \slow_path
1984 # Add array data offset and alignment (in branch delay slot).
1985 addiu $a2, $a1, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
1986.endm
1987
1988.macro COMPUTE_ARRAY_SIZE_16 slow_path
1989 # Possibly a large object, go slow.
1990 # Also does negative array size check.
1991 li $a2, ((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 2)
1992 bgtu $a1, $a2, \slow_path
1993 sll $a2, $a1, 1
1994 # Add array data offset and alignment.
1995 addiu $a2, $a2, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
1996.endm
1997
1998.macro COMPUTE_ARRAY_SIZE_32 slow_path
1999 # Possibly a large object, go slow.
2000 # Also does negative array size check.
2001 li $a2, ((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_INT_ARRAY_DATA_OFFSET) / 4)
2002 bgtu $a1, $a2, \slow_path
2003 sll $a2, $a1, 2
2004 # Add array data offset and alignment.
2005 addiu $a2, $a2, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
2006.endm
2007
2008.macro COMPUTE_ARRAY_SIZE_64 slow_path
2009 # Possibly a large object, go slow.
2010 # Also does negative array size check.
2011 li $a2, ((MIN_LARGE_OBJECT_THRESHOLD - MIRROR_LONG_ARRAY_DATA_OFFSET) / 8)
2012 bgtu $a1, $a2, \slow_path
2013 sll $a2, $a1, 3
2014 # Add array data offset and alignment.
2015 addiu $a2, $a2, (MIRROR_WIDE_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
2016.endm
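// Worked example (hedged; the exact offsets are build-generated constants) for
// COMPUTE_ARRAY_SIZE_32 above: for new int[10], $a2 = 10 << 2 = 40, then
// $a2 += MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK (typically 12 + 7 on 32-bit
// targets) = 59; the allocation fast path then rounds down with OBJECT_ALIGNMENT_MASK_TOGGLED,
// giving an 8-byte-aligned object size of 56 = header (12) + data (40) rounded up.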
2017
2018GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
2019GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_8
2020GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_16
2021GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_32
2022GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_64
2023
2024GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
2025GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_8
2026GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_16
2027GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
2028GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64
Hiroshi Yamauchi10d4c082016-02-24 12:51:18 -08002029
Orion Hodson18259d72018-04-12 11:18:23 +01002030 /*
2031 * Macro for resolution and initialization of indexed DEX file
2032 * constants such as classes and strings. $a0 is both input and
2033 * output.
2034 */
Alexey Frunze19428ad2017-08-03 10:36:46 -07002035.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL name, entrypoint, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET
Alexey Frunzec61c0762017-04-10 13:54:23 -07002036 .extern \entrypoint
2037ENTRY_NO_GP \name
Alexey Frunze19428ad2017-08-03 10:36:46 -07002038 SETUP_SAVE_EVERYTHING_FRAME \runtime_method_offset # Save everything in case of GC.
Alexey Frunzec61c0762017-04-10 13:54:23 -07002039 move $s2, $gp # Preserve $gp across the call for exception delivery.
2040 la $t9, \entrypoint
2041 jalr $t9 # (uint32_t index, Thread*)
2042 move $a1, rSELF # Pass Thread::Current (in delay slot).
2043 beqz $v0, 1f # Success?
2044 move $a0, $v0 # Move result to $a0 (in delay slot).
2045 RESTORE_SAVE_EVERYTHING_FRAME 0 # Restore everything except $a0.
2046 jalr $zero, $ra # Return on success.
2047 nop
20481:
2049 move $gp, $s2
2050 DELIVER_PENDING_EXCEPTION_FRAME_READY
2051END \name
2052.endm
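// In C-like terms (hedged sketch) the macro above expands to:
//   mirror::Object* result = entrypoint(index /* in $a0 */, Thread::Current());
//   if (result != nullptr) { /* $a0 = result; restore everything else; return */ }
//   else { /* restore $gp and deliver the pending exception set by the entrypoint */ }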
2053
Alexey Frunze19428ad2017-08-03 10:36:46 -07002054.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT name, entrypoint
2055 ONE_ARG_SAVE_EVERYTHING_DOWNCALL \name, \entrypoint, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
2056.endm
2057
buzbee5bc5a7b2012-03-07 15:52:59 -08002058 /*
Orion Hodsondbaa5c72018-05-10 08:22:46 +01002059 * Entry from managed code to resolve a method handle. On entry, A0 holds the method handle
2060 * index. On success the MethodHandle is returned, otherwise an exception is raised.
2061 */
2062ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_handle, artResolveMethodHandleFromCode
2063
2064 /*
Orion Hodson18259d72018-04-12 11:18:23 +01002065 * Entry from managed code to resolve a method type. On entry, A0 holds the method type index.
2066 * On success the MethodType is returned, otherwise an exception is raised.
2067 */
2068ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_type, artResolveMethodTypeFromCode
2069
2070 /*
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002071 * Entry from managed code to resolve a string; this stub will allocate a String and deliver an
Vladimir Marko5ea536a2015-04-20 20:11:30 +01002072 * exception on error. On success the String is returned. A0 holds the string index. The fast
 2073 * path check for a hit in the strings cache has already been performed.
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002074 */
Alexey Frunzec61c0762017-04-10 13:54:23 -07002075ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002076
2077 /*
 2078 * Entry from managed code when static storage is uninitialized; this stub will run the class
2079 * initializer and deliver the exception on error. On success the static storage base is
2080 * returned.
2081 */
Alexey Frunze19428ad2017-08-03 10:36:46 -07002082ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002083
2084 /*
2085 * Entry from managed code when dex cache misses for a type_idx.
2086 */
Alexey Frunze19428ad2017-08-03 10:36:46 -07002087ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_type, artInitializeTypeFromCode
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002088
2089 /*
2090 * Entry from managed code when type_idx needs to be checked for access and dex cache may also
2091 * miss.
2092 */
Alexey Frunzec61c0762017-04-10 13:54:23 -07002093ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08002094
2095 /*
Ian Rogers57b86d42012-03-27 16:05:41 -07002096 * Called by managed code when the value in rSUSPEND has been decremented to 0.
buzbee5bc5a7b2012-03-07 15:52:59 -08002097 */
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002098 .extern artTestSuspendFromCode
Vladimir Marko952dbb12016-07-28 12:01:51 +01002099ENTRY_NO_GP art_quick_test_suspend
Alexey Frunze19428ad2017-08-03 10:36:46 -07002100 SETUP_SAVE_EVERYTHING_FRAME RUNTIME_SAVE_EVERYTHING_FOR_SUSPEND_CHECK_METHOD_OFFSET
2101 # save everything for stack crawl
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002102 la $t9, artTestSuspendFromCode
Vladimir Marko952dbb12016-07-28 12:01:51 +01002103 jalr $t9 # (Thread*)
jeffhao7fbee072012-08-24 17:56:54 -07002104 move $a0, rSELF
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002105 RESTORE_SAVE_EVERYTHING_FRAME
Vladimir Marko952dbb12016-07-28 12:01:51 +01002106 jalr $zero, $ra
2107 nop
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002108END art_quick_test_suspend
buzbee5bc5a7b2012-03-07 15:52:59 -08002109
buzbee5bc5a7b2012-03-07 15:52:59 -08002110 /*
2111 * Called by managed code that is attempting to call a method on a proxy class. On entry
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002112 * a0 holds the proxy method; a1, a2 and a3 may contain arguments.
buzbee5bc5a7b2012-03-07 15:52:59 -08002113 */
Jeff Hao5fa60c32013-04-04 17:57:01 -07002114 .extern artQuickProxyInvokeHandler
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002115ENTRY art_quick_proxy_invoke_handler
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002116 SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0
Douglas Leung735b8552014-10-31 12:21:40 -07002117 move $a2, rSELF # pass Thread::Current
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002118 la $t9, artQuickProxyInvokeHandler
2119 jalr $t9 # (Method* proxy method, receiver, Thread*, SP)
Douglas Leung735b8552014-10-31 12:21:40 -07002120 addiu $a3, $sp, ARG_SLOT_SIZE # pass $sp (remove arg slots)
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002121 lw $t7, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002122 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002123 bnez $t7, 1f
Duane Sande34652f2014-11-04 11:09:36 -08002124 # don't care if $v0 and/or $v1 are modified when the exception branch is taken
2125 MTD $v0, $v1, $f0, $f1 # move float value to return value
Andreas Gampe8d365912015-01-13 11:32:32 -08002126 jalr $zero, $ra
Duane Sande34652f2014-11-04 11:09:36 -08002127 nop
buzbee5bc5a7b2012-03-07 15:52:59 -080021281:
2129 DELIVER_PENDING_EXCEPTION
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002130END art_quick_proxy_invoke_handler
buzbee5bc5a7b2012-03-07 15:52:59 -08002131
Jeff Hao88474b42013-10-23 16:24:40 -07002132 /*
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002133 * Called to resolve an imt conflict.
2134 * a0 is the conflict ArtMethod.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002135 * t7 is a hidden argument that holds the target interface method's dex method index.
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002136 *
Alexey Frunze279cfba2017-07-22 00:24:43 -07002137 * Note that this stub writes to v0-v1, a0, t2-t9, f0-f7.
Jeff Hao88474b42013-10-23 16:24:40 -07002138 */
Alexey Frunze279cfba2017-07-22 00:24:43 -07002139 .extern artLookupResolvedMethod
2140 .extern __atomic_load_8 # For int64_t std::atomic::load(std::memory_order).
Douglas Leung13738bf2014-10-27 14:44:47 -07002141ENTRY art_quick_imt_conflict_trampoline
Alexey Frunze279cfba2017-07-22 00:24:43 -07002142 SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY /* save_s4_thru_s8 */ 0
2143
2144 lw $t8, FRAME_SIZE_SAVE_REFS_AND_ARGS($sp) # $t8 = referrer.
Vladimir Marko5122e6b2017-08-17 16:10:09 +01002145 // If the method is obsolete, just go through the dex cache miss slow path.
 2146 // The obsolete flag is only set while threads are suspended, so we do not need an acquire operation here.
2147 lw $t9, ART_METHOD_ACCESS_FLAGS_OFFSET($t8) # $t9 = access flags.
2148 sll $t9, $t9, 31 - ACC_OBSOLETE_METHOD_SHIFT # Move obsolete method bit to sign bit.
2149 bltz $t9, .Limt_conflict_trampoline_dex_cache_miss
2150 lw $t8, ART_METHOD_DECLARING_CLASS_OFFSET($t8) # $t8 = declaring class (no read barrier).
2151 lw $t8, MIRROR_CLASS_DEX_CACHE_OFFSET($t8) # $t8 = dex cache (without read barrier).
2152 UNPOISON_HEAP_REF $t8
Alexey Frunze279cfba2017-07-22 00:24:43 -07002153 la $t9, __atomic_load_8
2154 addiu $sp, $sp, -ARG_SLOT_SIZE # Reserve argument slots on the stack.
2155 .cfi_adjust_cfa_offset ARG_SLOT_SIZE
Vladimir Marko5122e6b2017-08-17 16:10:09 +01002156 lw $t8, MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET($t8) # $t8 = dex cache methods array.
Alexey Frunze279cfba2017-07-22 00:24:43 -07002157
2158 move $s2, $t7 # $s2 = method index (callee-saved).
2159 lw $s3, ART_METHOD_JNI_OFFSET_32($a0) # $s3 = ImtConflictTable (callee-saved).
2160
2161 sll $t7, $t7, 32 - METHOD_DEX_CACHE_HASH_BITS # $t7 = slot index in top bits, zeroes below.
2162 srl $t7, $t7, 32 - METHOD_DEX_CACHE_HASH_BITS - (POINTER_SIZE_SHIFT + 1)
2163 # $t7 = slot offset.
2164
2165 li $a1, STD_MEMORY_ORDER_RELAXED # $a1 = std::memory_order_relaxed.
2166 jalr $t9 # [$v0, $v1] = __atomic_load_8($a0, $a1).
2167 addu $a0, $t8, $t7 # $a0 = DexCache method slot address.
2168
2169 bne $v1, $s2, .Limt_conflict_trampoline_dex_cache_miss # Branch if method index miss.
2170 addiu $sp, $sp, ARG_SLOT_SIZE # Remove argument slots from the stack.
2171 .cfi_adjust_cfa_offset -ARG_SLOT_SIZE
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002172
2173.Limt_table_iterate:
Alexey Frunze279cfba2017-07-22 00:24:43 -07002174 lw $t8, 0($s3) # Load next entry in ImtConflictTable.
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002175 # Branch if found.
Alexey Frunze279cfba2017-07-22 00:24:43 -07002176 beq $t8, $v0, .Limt_table_found
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002177 nop
2178 # If the entry is null, the interface method is not in the ImtConflictTable.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002179 beqz $t8, .Lconflict_trampoline
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002180 nop
2181 # Iterate over the entries of the ImtConflictTable.
2182 b .Limt_table_iterate
Alexey Frunze279cfba2017-07-22 00:24:43 -07002183 addiu $s3, $s3, 2 * __SIZEOF_POINTER__ # Iterate to the next entry.
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002184
2185.Limt_table_found:
2186 # We successfully hit an entry in the table. Load the target method and jump to it.
Alexey Frunze279cfba2017-07-22 00:24:43 -07002187 .cfi_remember_state
2188 lw $a0, __SIZEOF_POINTER__($s3)
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002189 lw $t9, ART_METHOD_QUICK_CODE_OFFSET_32($a0)
Alexey Frunze279cfba2017-07-22 00:24:43 -07002190 RESTORE_SAVE_REFS_AND_ARGS_FRAME /* restore_s4_thru_s8 */ 0, /* remove_arg_slots */ 0
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002191 jalr $zero, $t9
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002192 nop
Alexey Frunze279cfba2017-07-22 00:24:43 -07002193 .cfi_restore_state
Goran Jakovljevic59028d92016-03-29 18:05:03 +02002194
2195.Lconflict_trampoline:
2196 # Call the runtime stub to populate the ImtConflictTable and jump to the resolved method.
Alexey Frunze279cfba2017-07-22 00:24:43 -07002197 .cfi_remember_state
2198 RESTORE_SAVE_REFS_AND_ARGS_FRAME_GP # Restore clobbered $gp.
2199 RESTORE_SAVE_REFS_AND_ARGS_FRAME_A1 # Restore this.
2200 move $a0, $v0 # Load interface method.
2201 INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline, /* save_s4_thru_s8_only */ 1
2202 .cfi_restore_state
2203
2204.Limt_conflict_trampoline_dex_cache_miss:
2205 # We're not creating a proper runtime method frame here,
2206 # artLookupResolvedMethod() is not allowed to walk the stack.
2207 RESTORE_SAVE_REFS_AND_ARGS_FRAME_GP # Restore clobbered $gp.
2208 lw $a1, FRAME_SIZE_SAVE_REFS_AND_ARGS($sp) # $a1 = referrer.
2209 la $t9, artLookupResolvedMethod
2210 addiu $sp, $sp, -ARG_SLOT_SIZE # Reserve argument slots on the stack.
2211 .cfi_adjust_cfa_offset ARG_SLOT_SIZE
2212 jalr $t9 # (uint32_t method_index, ArtMethod* referrer).
2213 move $a0, $s2 # $a0 = method index.
2214
2215 # If the method wasn't resolved, skip the lookup and go to artInvokeInterfaceTrampoline().
2216 beqz $v0, .Lconflict_trampoline
2217 addiu $sp, $sp, ARG_SLOT_SIZE # Remove argument slots from the stack.
2218 .cfi_adjust_cfa_offset -ARG_SLOT_SIZE
2219
2220 b .Limt_table_iterate
2221 nop
Jeff Hao88474b42013-10-23 16:24:40 -07002222END art_quick_imt_conflict_trampoline
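// Illustrative pseudocode (hedged; helper names are placeholders) for the conflict resolution above:
//   auto [cached_method, cached_index] =
//       AtomicLoad64(&referrer->dex_cache_->resolved_methods_[Hash(t7 /* dex method index */)]);
//   ArtMethod* interface_method =
//       (cached_index == t7) ? cached_method : artLookupResolvedMethod(t7, referrer);
//   if (interface_method == nullptr) goto conflict;        // let the runtime resolve and populate
//   for (Entry* e = conflict_method->imt_conflict_table_; ; ++e) {  // {interface, implementation}
//     if (e->interface == interface_method) tail_call(e->implementation->quick_code_);
//     if (e->interface == nullptr) goto conflict;
//   }
//   conflict: artInvokeInterfaceTrampoline(interface_method, ...);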
2223
Ian Rogers468532e2013-08-05 10:56:33 -07002224 .extern artQuickResolutionTrampoline
2225ENTRY art_quick_resolution_trampoline
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002226 SETUP_SAVE_REFS_AND_ARGS_FRAME
Douglas Leung735b8552014-10-31 12:21:40 -07002227 move $a2, rSELF # pass Thread::Current
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002228 la $t9, artQuickResolutionTrampoline
2229 jalr $t9 # (Method* called, receiver, Thread*, SP)
Douglas Leung735b8552014-10-31 12:21:40 -07002230 addiu $a3, $sp, ARG_SLOT_SIZE # pass $sp (remove arg slots)
Ian Rogers468532e2013-08-05 10:56:33 -07002231 beqz $v0, 1f
Douglas Leung735b8552014-10-31 12:21:40 -07002232 lw $a0, ARG_SLOT_SIZE($sp) # load resolved method to $a0
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002233 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Ian Rogers65d1b222013-09-27 10:59:41 -07002234 move $t9, $v0 # code pointer must be in $t9 to generate the global pointer
Douglas Leungf96e8bd2015-03-27 15:38:30 -07002235 jalr $zero, $t9 # tail call to method
Mathieu Chartier19841522013-10-22 11:29:00 -07002236 nop
Ian Rogers468532e2013-08-05 10:56:33 -070022371:
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002238 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Ian Rogers468532e2013-08-05 10:56:33 -07002239 DELIVER_PENDING_EXCEPTION
2240END art_quick_resolution_trampoline
2241
Douglas Leung735b8552014-10-31 12:21:40 -07002242 .extern artQuickGenericJniTrampoline
2243 .extern artQuickGenericJniEndTrampoline
2244ENTRY art_quick_generic_jni_trampoline
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002245 SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0
Douglas Leung735b8552014-10-31 12:21:40 -07002246 move $s8, $sp # save $sp to $s8
2247 move $s3, $gp # save $gp to $s3
2248
2249 # prepare for call to artQuickGenericJniTrampoline(Thread*, SP)
2250 move $a0, rSELF # pass Thread::Current
2251 addiu $a1, $sp, ARG_SLOT_SIZE # save $sp (remove arg slots)
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002252 la $t9, artQuickGenericJniTrampoline
2253 jalr $t9 # (Thread*, SP)
Douglas Leung735b8552014-10-31 12:21:40 -07002254    addiu  $sp, $sp, -5120      # reserve space on the stack (alloca area in which the C call builds the native frame)
2255
2256 # The C call will have registered the complete save-frame on success.
2257 # The result of the call is:
2258 # v0: ptr to native code, 0 on error.
2259 # v1: ptr to the bottom of the used area of the alloca, can restore stack till here.
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002260 beq $v0, $zero, 2f # check entry error
Douglas Leung735b8552014-10-31 12:21:40 -07002261 move $t9, $v0 # save the code ptr
2262 move $sp, $v1 # release part of the alloca
2263
2264 # Load parameters from stack into registers
2265 lw $a0, 0($sp)
2266 lw $a1, 4($sp)
2267 lw $a2, 8($sp)
Douglas Leung735b8552014-10-31 12:21:40 -07002268 lw $a3, 12($sp)
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002269
2270 # artQuickGenericJniTrampoline sets bit 0 of the native code address to 1
2271 # when the first two arguments are both single precision floats. This lets
2272 # us extract them properly from the stack and load into floating point
2273 # registers.
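    # In effect (informal sketch of the dispatch below; MTD moves a GPR pair into an
    # even/odd FP register pair):
    #   bit0 = code & 1; code &= ~1;
    #   $f12(/$f13) <- bits of $a0(/$a1)            // covers a double or a single float in $f12
    #   if (bit0) $f14 <- bits of $a1               // two single floats: $a1 goes to $f14 alone
    #   else      $f14(/$f15) <- bits of $a2(/$a3)  // otherwise $a2/$a3 may hold a second double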
2274 MTD $a0, $a1, $f12, $f13
2275 andi $t0, $t9, 1
2276 xor $t9, $t9, $t0
2277 bnez $t0, 1f
2278 mtc1 $a1, $f14
2279 MTD $a2, $a3, $f14, $f15
2280
22811:
2282 jalr $t9 # native call
2283 nop
Douglas Leung735b8552014-10-31 12:21:40 -07002284 addiu $sp, $sp, 16 # remove arg slots
2285
2286 move $gp, $s3 # restore $gp from $s3
2287
2288 # result sign extension is handled in C code
2289 # prepare for call to artQuickGenericJniEndTrampoline(Thread*, result, result_f)
2290 move $a0, rSELF # pass Thread::Current
2291 move $a2, $v0 # pass result
2292 move $a3, $v1
Chris Larsen715f43e2017-10-23 11:00:32 -07002293 addiu $sp, $sp, -32 # reserve arg slots
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002294 la $t9, artQuickGenericJniEndTrampoline
2295 jalr $t9
Douglas Leung735b8552014-10-31 12:21:40 -07002296 s.d $f0, 16($sp) # pass result_f
Douglas Leung735b8552014-10-31 12:21:40 -07002297
2298 lw $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002299 bne $t0, $zero, 2f # check for pending exceptions
Nicolas Geoffray126d6592015-03-03 14:28:35 +00002300
Douglas Leung735b8552014-10-31 12:21:40 -07002301 move $sp, $s8 # tear down the alloca
2302
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002303 # tear down the callee-save frame
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002304 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Douglas Leung735b8552014-10-31 12:21:40 -07002305
Duane Sande34652f2014-11-04 11:09:36 -08002306 MTD $v0, $v1, $f0, $f1 # move float value to return value
Andreas Gampe8d365912015-01-13 11:32:32 -08002307 jalr $zero, $ra
Duane Sande34652f2014-11-04 11:09:36 -08002308 nop
Douglas Leung735b8552014-10-31 12:21:40 -07002309
Alexey Frunze1b8464d2016-11-12 17:22:05 -080023102:
Vladimir Marko2196c652017-11-30 16:16:07 +00002311 lw $t0, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)
2312 addiu $sp, $t0, -1 // Remove the GenericJNI tag.
Alexey Frunzec61c0762017-04-10 13:54:23 -07002313 move $gp, $s3 # restore $gp from $s3
Nicolas Geoffray126d6592015-03-03 14:28:35 +00002314 # This will create a new save-all frame, required by the runtime.
Douglas Leung735b8552014-10-31 12:21:40 -07002315 DELIVER_PENDING_EXCEPTION
2316END art_quick_generic_jni_trampoline
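    /*
     * Overall shape of the trampoline above (informal sketch; argument lists follow the
     * comments in the code rather than the exact C++ prototypes):
     *
     *   code = artQuickGenericJniTrampoline(self, managed_sp)  // builds handle scope + native args
     *   if (code == null) goto deliver_exception               // $v0 == 0 on error
     *   raw_result = code(native args)                         // FP args set up per bit 0 of code
     *   result = artQuickGenericJniEndTrampoline(self, raw_result, raw_result_f)
     *   if (self->exception_ != null) goto deliver_exception
     *   return result                                          // in $v0/$v1, mirrored to $f0/$f1
     */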
Andreas Gampe2da88232014-02-27 12:26:20 -08002317
Ian Rogers468532e2013-08-05 10:56:33 -07002318 .extern artQuickToInterpreterBridge
2319ENTRY art_quick_to_interpreter_bridge
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002320 SETUP_SAVE_REFS_AND_ARGS_FRAME
Douglas Leung735b8552014-10-31 12:21:40 -07002321 move $a1, rSELF # pass Thread::Current
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002322 la $t9, artQuickToInterpreterBridge
2323 jalr $t9 # (Method* method, Thread*, SP)
Douglas Leung735b8552014-10-31 12:21:40 -07002324 addiu $a2, $sp, ARG_SLOT_SIZE # pass $sp (remove arg slots)
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002325 lw $t7, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002326 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002327 bnez $t7, 1f
Duane Sande34652f2014-11-04 11:09:36 -08002328    # We don't care if $v0 and/or $v1 are modified when the exception branch is taken.
2329 MTD $v0, $v1, $f0, $f1 # move float value to return value
Andreas Gampe8d365912015-01-13 11:32:32 -08002330 jalr $zero, $ra
Duane Sande34652f2014-11-04 11:09:36 -08002331 nop
Ian Rogers7db619b2013-01-16 18:35:48 -080023321:
2333 DELIVER_PENDING_EXCEPTION
Ian Rogers468532e2013-08-05 10:56:33 -07002334END art_quick_to_interpreter_bridge
Ian Rogers7db619b2013-01-16 18:35:48 -08002335
Alex Lightdb01a092017-04-03 15:39:55 -07002336 .extern artInvokeObsoleteMethod
2337ENTRY art_invoke_obsolete_method_stub
2338 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
2339 la $t9, artInvokeObsoleteMethod
2340 jalr $t9 # (Method* method, Thread* self)
2341 move $a1, rSELF # pass Thread::Current
2342END art_invoke_obsolete_method_stub
2343
buzbee5bc5a7b2012-03-07 15:52:59 -08002344 /*
jeffhao725a9572012-11-13 18:20:12 -08002345 * Routine that intercepts method calls and returns.
buzbee5bc5a7b2012-03-07 15:52:59 -08002346 */
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002347 .extern artInstrumentationMethodEntryFromCode
2348 .extern artInstrumentationMethodExitFromCode
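    /*
     * Informal sketch of the interception protocol implemented by the two stubs below:
     *
     *   code = artInstrumentationMethodEntryFromCode(method, this, self, sp)  // null => exception
     *   $ra  = art_quick_instrumentation_exit   // the callee "returns" into the exit stub,
     *   jump code                               // which then calls
     *                                           // artInstrumentationMethodExitFromCode.
     */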
Ian Rogers468532e2013-08-05 10:56:33 -07002349ENTRY art_quick_instrumentation_entry
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002350 SETUP_SAVE_REFS_AND_ARGS_FRAME
Alexey Frunze60454cb2017-08-09 21:28:30 -07002351 sw $a0, 28($sp) # save arg0 in free arg slot
2352 addiu $a3, $sp, ARG_SLOT_SIZE # Pass $sp.
2353 la $t9, artInstrumentationMethodEntryFromCode
2354 jalr $t9 # (Method*, Object*, Thread*, SP)
2355 move $a2, rSELF # pass Thread::Current
2356 beqz $v0, .Ldeliver_instrumentation_entry_exception
2357 move $t9, $v0 # $t9 holds reference to code
2358 lw $a0, 28($sp) # restore arg0 from free arg slot
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002359 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Alexey Frunze60454cb2017-08-09 21:28:30 -07002360 la $ra, art_quick_instrumentation_exit
2361 jalr $zero, $t9 # call method, returning to art_quick_instrumentation_exit
Goran Jakovljevic25e4d1e2017-06-14 13:47:44 +02002362 nop
2363.Ldeliver_instrumentation_entry_exception:
Goran Jakovljevic25e4d1e2017-06-14 13:47:44 +02002364 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Goran Jakovljevic25e4d1e2017-06-14 13:47:44 +02002365 DELIVER_PENDING_EXCEPTION
Alexey Frunze60454cb2017-08-09 21:28:30 -07002366END art_quick_instrumentation_entry
2367
2368ENTRY_NO_GP art_quick_instrumentation_exit
2369 move $ra, $zero # RA points here, so clobber with 0 for later checks.
2370 SETUP_SAVE_EVERYTHING_FRAME # Allocates ARG_SLOT_SIZE bytes at the bottom of the stack.
2371 move $s2, $gp # Preserve $gp across the call for exception delivery.
2372
2373 addiu $a3, $sp, ARG_SLOT_SIZE+16 # Pass fpr_res pointer ($f0 in SAVE_EVERYTHING_FRAME).
2374 addiu $a2, $sp, ARG_SLOT_SIZE+148 # Pass gpr_res pointer ($v0 in SAVE_EVERYTHING_FRAME).
2375 addiu $a1, $sp, ARG_SLOT_SIZE # Pass $sp.
2376 la $t9, artInstrumentationMethodExitFromCode
2377 jalr $t9 # (Thread*, SP, gpr_res*, fpr_res*)
2378 move $a0, rSELF # Pass Thread::Current.
2379
2380 beqz $v0, .Ldo_deliver_instrumentation_exception
2381 move $gp, $s2 # Deliver exception if we got nullptr as function.
2382 bnez $v1, .Ldeoptimize
2383
2384 # Normal return.
2385 sw $v0, (ARG_SLOT_SIZE+FRAME_SIZE_SAVE_EVERYTHING-4)($sp) # Set return pc.
2386 RESTORE_SAVE_EVERYTHING_FRAME
2387 jalr $zero, $ra
2388 nop
2389.Ldo_deliver_instrumentation_exception:
2390 DELIVER_PENDING_EXCEPTION_FRAME_READY
2391.Ldeoptimize:
2392 b art_quick_deoptimize
2393 sw $v1, (ARG_SLOT_SIZE+FRAME_SIZE_SAVE_EVERYTHING-4)($sp)
2394 # Fake a call from instrumentation return pc.
Ian Rogers468532e2013-08-05 10:56:33 -07002395END art_quick_instrumentation_exit
buzbee5bc5a7b2012-03-07 15:52:59 -08002396
jeffhao12051ea2013-01-10 11:24:31 -08002397 /*
Ian Rogers62d6c772013-02-27 08:32:07 -08002398 * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
2399 * will long jump to the upcall with a special exception of -1.
jeffhao12051ea2013-01-10 11:24:31 -08002400 */
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002401 .extern artDeoptimize
Alexey Frunze60454cb2017-08-09 21:28:30 -07002402ENTRY_NO_GP_CUSTOM_CFA art_quick_deoptimize, ARG_SLOT_SIZE+FRAME_SIZE_SAVE_EVERYTHING
2403 # SETUP_SAVE_EVERYTHING_FRAME has been done by art_quick_instrumentation_exit.
2404 .cfi_rel_offset 31, ARG_SLOT_SIZE+252
2405 .cfi_rel_offset 30, ARG_SLOT_SIZE+248
2406 .cfi_rel_offset 28, ARG_SLOT_SIZE+244
2407 .cfi_rel_offset 25, ARG_SLOT_SIZE+240
2408 .cfi_rel_offset 24, ARG_SLOT_SIZE+236
2409 .cfi_rel_offset 23, ARG_SLOT_SIZE+232
2410 .cfi_rel_offset 22, ARG_SLOT_SIZE+228
2411 .cfi_rel_offset 21, ARG_SLOT_SIZE+224
2412 .cfi_rel_offset 20, ARG_SLOT_SIZE+220
2413 .cfi_rel_offset 19, ARG_SLOT_SIZE+216
2414 .cfi_rel_offset 18, ARG_SLOT_SIZE+212
2415 .cfi_rel_offset 17, ARG_SLOT_SIZE+208
2416 .cfi_rel_offset 16, ARG_SLOT_SIZE+204
2417 .cfi_rel_offset 15, ARG_SLOT_SIZE+200
2418 .cfi_rel_offset 14, ARG_SLOT_SIZE+196
2419 .cfi_rel_offset 13, ARG_SLOT_SIZE+192
2420 .cfi_rel_offset 12, ARG_SLOT_SIZE+188
2421 .cfi_rel_offset 11, ARG_SLOT_SIZE+184
2422 .cfi_rel_offset 10, ARG_SLOT_SIZE+180
2423 .cfi_rel_offset 9, ARG_SLOT_SIZE+176
2424 .cfi_rel_offset 8, ARG_SLOT_SIZE+172
2425 .cfi_rel_offset 7, ARG_SLOT_SIZE+168
2426 .cfi_rel_offset 6, ARG_SLOT_SIZE+164
2427 .cfi_rel_offset 5, ARG_SLOT_SIZE+160
2428 .cfi_rel_offset 4, ARG_SLOT_SIZE+156
2429 .cfi_rel_offset 3, ARG_SLOT_SIZE+152
2430 .cfi_rel_offset 2, ARG_SLOT_SIZE+148
2431 .cfi_rel_offset 1, ARG_SLOT_SIZE+144
2432
2433 la $t9, artDeoptimize
2434 jalr $t9 # (Thread*)
2435    move   $a0, rSELF       # pass Thread::Current
2436 break
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002437END art_quick_deoptimize
jeffhao12051ea2013-01-10 11:24:31 -08002438
buzbee5bc5a7b2012-03-07 15:52:59 -08002439 /*
Sebastien Hertz07474662015-08-25 15:12:33 +00002440 * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
2441 * will long jump to the upcall with a special exception of -1.
2442 */
2443 .extern artDeoptimizeFromCompiledCode
Alexey Frunze60454cb2017-08-09 21:28:30 -07002444ENTRY_NO_GP art_quick_deoptimize_from_compiled_code
Vladimir Marko239d6ea2016-09-05 10:44:04 +01002445 SETUP_SAVE_EVERYTHING_FRAME
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002446 la $t9, artDeoptimizeFromCompiledCode
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +01002447 jalr $t9 # (DeoptimizationKind, Thread*)
2448    move   $a1, rSELF       # pass Thread::Current
Sebastien Hertz07474662015-08-25 15:12:33 +00002449END art_quick_deoptimize_from_compiled_code
2450
2451 /*
buzbee5bc5a7b2012-03-07 15:52:59 -08002452 * Long integer shift. This is different from the generic 32/64-bit
2453 * binary operations because vAA/vBB are 64-bit but vCC (the shift
2454 * distance) is 32-bit. Also, Dalvik requires us to ignore all but the low
2455 * 6 bits.
2456 * On entry:
jeffhao7fbee072012-08-24 17:56:54 -07002457 * $a0: low word
2458 * $a1: high word
2459 * $a2: shift count
buzbee5bc5a7b2012-03-07 15:52:59 -08002460 */
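    /*
     * Informal C sketch of art_quick_shl_long below (illustration only; as noted above, only
     * the low 6 bits of the shift count matter):
     *
     *   s = shift & 63;
     *   if (s & 0x20) { rhi = alo << (s & 31); rlo = 0; }
     *   else          { rlo = alo << s;
     *                   rhi = (ahi << s) | (s ? alo >> (32 - s) : 0); }
     */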
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002461ENTRY_NO_GP art_quick_shl_long
buzbee5bc5a7b2012-03-07 15:52:59 -08002462 /* shl-long vAA, vBB, vCC */
jeffhao7fbee072012-08-24 17:56:54 -07002463 sll $v0, $a0, $a2 # rlo<- alo << (shift&31)
2464 not $v1, $a2 # rhi<- 31-shift (shift is 5b)
2465 srl $a0, 1
2466 srl $a0, $v1 # alo<- alo >> (32-(shift&31))
2467 sll $v1, $a1, $a2 # rhi<- ahi << (shift&31)
jeffhao7fbee072012-08-24 17:56:54 -07002468    andi    $a2, 0x20                      #  shift<- shift & 0x20
Duane Sande34652f2014-11-04 11:09:36 -08002469 beqz $a2, 1f
2470 or $v1, $a0 # rhi<- rhi | alo
2471
2472 move $v1, $v0 # rhi<- rlo (if shift&0x20)
2473 move $v0, $zero # rlo<- 0 (if shift&0x20)
2474
Andreas Gampe8d365912015-01-13 11:32:32 -080024751: jalr $zero, $ra
Duane Sande34652f2014-11-04 11:09:36 -08002476 nop
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002477END art_quick_shl_long
buzbee5bc5a7b2012-03-07 15:52:59 -08002478
buzbee5bc5a7b2012-03-07 15:52:59 -08002479 /*
2480 * Long integer shift. This is different from the generic 32/64-bit
2481 * binary operations because vAA/vBB are 64-bit but vCC (the shift
2482 * distance) is 32-bit. Also, Dalvik requires us to ignore all but the low
2483 * 6 bits.
2484 * On entry:
jeffhao7fbee072012-08-24 17:56:54 -07002485 * $a0: low word
2486 * $a1: high word
2487 * $a2: shift count
buzbee5bc5a7b2012-03-07 15:52:59 -08002488 */
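    /*
     * Informal C sketch of art_quick_shr_long below (arithmetic shift; illustration only):
     *
     *   s = shift & 63;
     *   if (s & 0x20) { rlo = (int32_t)ahi >> (s & 31); rhi = (int32_t)ahi >> 31; }
     *   else          { rlo = (alo >> s) | (s ? ahi << (32 - s) : 0);
     *                   rhi = (int32_t)ahi >> s; }
     */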
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002489ENTRY_NO_GP art_quick_shr_long
jeffhao7fbee072012-08-24 17:56:54 -07002490 sra $v1, $a1, $a2 # rhi<- ahi >> (shift&31)
2491 srl $v0, $a0, $a2 # rlo<- alo >> (shift&31)
2492 sra $a3, $a1, 31 # $a3<- sign(ah)
2493 not $a0, $a2 # alo<- 31-shift (shift is 5b)
2494 sll $a1, 1
2495 sll $a1, $a0 # ahi<- ahi << (32-(shift&31))
jeffhao7fbee072012-08-24 17:56:54 -07002496 andi $a2, 0x20 # shift & 0x20
Douglas Leung475cfd82014-12-16 20:15:41 -08002497 beqz $a2, 1f
Duane Sande34652f2014-11-04 11:09:36 -08002498 or $v0, $a1 # rlo<- rlo | ahi
2499
2500 move $v0, $v1 # rlo<- rhi (if shift&0x20)
2501 move $v1, $a3 # rhi<- sign(ahi) (if shift&0x20)
2502
Andreas Gampe8d365912015-01-13 11:32:32 -080025031: jalr $zero, $ra
Duane Sande34652f2014-11-04 11:09:36 -08002504 nop
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002505END art_quick_shr_long
buzbee5bc5a7b2012-03-07 15:52:59 -08002506
buzbee5bc5a7b2012-03-07 15:52:59 -08002507 /*
2508 * Long integer shift. This is different from the generic 32/64-bit
2509 * binary operations because vAA/vBB are 64-bit but vCC (the shift
2510 * distance) is 32-bit. Also, Dalvik requires us to ignore all but the low
2511 * 6 bits.
2512 * On entry:
Goran Jakovljevic590b1362016-03-21 14:24:43 +01002513 * $a0: low word
2514 * $a1: high word
2515 * $a2: shift count
buzbee5bc5a7b2012-03-07 15:52:59 -08002516 */
2517 /* ushr-long vAA, vBB, vCC */
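    /*
     * Informal C sketch of art_quick_ushr_long below (logical shift; illustration only):
     *
     *   s = shift & 63;
     *   if (s & 0x20) { rlo = ahi >> (s & 31); rhi = 0; }
     *   else          { rlo = (alo >> s) | (s ? ahi << (32 - s) : 0);
     *                   rhi = ahi >> s; }
     */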
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002518ENTRY_NO_GP art_quick_ushr_long
jeffhaofc6a30e2012-10-18 18:24:15 -07002519 srl $v1, $a1, $a2 # rhi<- ahi >> (shift&31)
jeffhao7fbee072012-08-24 17:56:54 -07002520 srl $v0, $a0, $a2 # rlo<- alo >> (shift&31)
jeffhao7fbee072012-08-24 17:56:54 -07002521 not $a0, $a2 # alo<- 31-shift (shift is 5b)
2522 sll $a1, 1
2523 sll $a1, $a0 # ahi<- ahi << (32-(shift&31))
jeffhao7fbee072012-08-24 17:56:54 -07002524 andi $a2, 0x20 # shift & 0x20
Duane Sande34652f2014-11-04 11:09:36 -08002525 beqz $a2, 1f
2526 or $v0, $a1 # rlo<- rlo | ahi
2527
2528 move $v0, $v1 # rlo<- rhi (if shift&0x20)
2529 move $v1, $zero # rhi<- 0 (if shift&0x20)
2530
Andreas Gampe8d365912015-01-13 11:32:32 -080025311: jalr $zero, $ra
Duane Sande34652f2014-11-04 11:09:36 -08002532 nop
Jeff Haod4c3f7d2013-02-14 14:14:44 -08002533END art_quick_ushr_long
jeffhao7fbee072012-08-24 17:56:54 -07002534
Chris Larsencf283da2016-01-19 16:45:35 -08002535/* java.lang.String.indexOf(int ch, int fromIndex=0) */
2536ENTRY_NO_GP art_quick_indexof
2537/* $a0 holds address of "this" */
2538/* $a1 holds "ch" */
2539/* $a2 holds "fromIndex" */
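/* Reference behavior (informal sketch of what the code below computes):
 *   if (fromIndex < 0) fromIndex = 0;
 *   for (int i = fromIndex; i < this.length(); ++i)
 *     if (this.charAt(i) == ch) return i;
 *   return -1;
 * For compressed (Latin-1) strings the loop reads bytes instead of 16-bit chars. */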
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002540#if (STRING_COMPRESSION_FEATURE)
2541 lw $a3, MIRROR_STRING_COUNT_OFFSET($a0) # 'count' field of this
Chris Larsencf283da2016-01-19 16:45:35 -08002542#else
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002543 lw $t0, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
Chris Larsencf283da2016-01-19 16:45:35 -08002544#endif
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002545 slt $t1, $a2, $zero # if fromIndex < 0
Goran Jakovljeviccdb23d62017-02-28 14:58:01 +01002546#if defined(_MIPS_ARCH_MIPS32R6)
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002547 seleqz $a2, $a2, $t1 # fromIndex = 0;
2548#else
2549 movn $a2, $zero, $t1 # fromIndex = 0;
2550#endif
Goran Jakovljeviccdb23d62017-02-28 14:58:01 +01002551
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002552#if (STRING_COMPRESSION_FEATURE)
2553 srl $t0, $a3, 1 # $a3 holds count (with flag) and $t0 holds actual length
2554#endif
2555 subu $t0, $t0, $a2 # this.length() - fromIndex
2556 blez $t0, 6f # if this.length()-fromIndex <= 0
2557 li $v0, -1 # return -1;
Chris Larsencf283da2016-01-19 16:45:35 -08002558
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002559#if (STRING_COMPRESSION_FEATURE)
2560 sll $a3, $a3, 31 # Extract compression flag.
2561 beqz $a3, .Lstring_indexof_compressed
2562 move $t2, $a0 # Save a copy in $t2 to later compute result (in branch delay slot).
2563#endif
2564 sll $v0, $a2, 1 # $a0 += $a2 * 2
2565 addu $a0, $a0, $v0 # " ditto "
2566 move $v0, $a2 # Set i to fromIndex.
Chris Larsencf283da2016-01-19 16:45:35 -08002567
25681:
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002569 lhu $t3, MIRROR_STRING_VALUE_OFFSET($a0) # if this.charAt(i) == ch
2570 beq $t3, $a1, 6f # return i;
2571 addu $a0, $a0, 2 # i++
2572 subu $t0, $t0, 1 # this.length() - i
2573 bnez $t0, 1b # while this.length() - i > 0
2574 addu $v0, $v0, 1 # i++
Chris Larsencf283da2016-01-19 16:45:35 -08002575
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002576 li $v0, -1 # if this.length() - i <= 0
2577 # return -1;
Chris Larsencf283da2016-01-19 16:45:35 -08002578
25796:
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002580 j $ra
2581 nop
2582
2583#if (STRING_COMPRESSION_FEATURE)
2584.Lstring_indexof_compressed:
2585 addu $a0, $a0, $a2 # $a0 += $a2
2586
2587.Lstring_indexof_compressed_loop:
2588 lbu $t3, MIRROR_STRING_VALUE_OFFSET($a0)
2589 beq $t3, $a1, .Lstring_indexof_compressed_matched
2590 subu $t0, $t0, 1
2591 bgtz $t0, .Lstring_indexof_compressed_loop
2592 addu $a0, $a0, 1
2593
2594.Lstring_indexof_nomatch:
2595 jalr $zero, $ra
2596 li $v0, -1 # return -1;
2597
2598.Lstring_indexof_compressed_matched:
2599 jalr $zero, $ra
2600 subu $v0, $a0, $t2 # return (current - start);
2601#endif
Chris Larsencf283da2016-01-19 16:45:35 -08002602END art_quick_indexof
2603
Chris Larsencf283da2016-01-19 16:45:35 -08002604/* java.lang.String.compareTo(String anotherString) */
2605ENTRY_NO_GP art_quick_string_compareto
2606/* $a0 holds address of "this" */
2607/* $a1 holds address of "anotherString" */
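/* Reference behavior (informal sketch of what the code below computes):
 *   int min = Math.min(this.length(), anotherString.length());
 *   for (int i = 0; i < min; ++i)
 *     if (this.charAt(i) != anotherString.charAt(i))
 *       return this.charAt(i) - anotherString.charAt(i);
 *   return this.length() - anotherString.length();
 * With string compression each side is read independently as bytes (Latin-1) or 16-bit chars. */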
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002608 beq $a0, $a1, .Lstring_compareto_length_diff # this and anotherString are the same object
2609 move $a3, $a2 # trick to return 0 (it returns a2 - a3)
Chris Larsencf283da2016-01-19 16:45:35 -08002610
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002611#if (STRING_COMPRESSION_FEATURE)
2612 lw $t0, MIRROR_STRING_COUNT_OFFSET($a0) # 'count' field of this
2613 lw $t1, MIRROR_STRING_COUNT_OFFSET($a1) # 'count' field of anotherString
2614 sra $a2, $t0, 1 # this.length()
2615 sra $a3, $t1, 1 # anotherString.length()
2616#else
2617 lw $a2, MIRROR_STRING_COUNT_OFFSET($a0) # this.length()
2618 lw $a3, MIRROR_STRING_COUNT_OFFSET($a1) # anotherString.length()
2619#endif
Chris Larsencf283da2016-01-19 16:45:35 -08002620
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002621 MINu $t2, $a2, $a3
2622 # $t2 now holds min(this.length(),anotherString.length())
Chris Larsencf283da2016-01-19 16:45:35 -08002623
Goran Jakovljevicf94fa812017-02-10 17:48:52 +01002624 # while min(this.length(),anotherString.length())-i != 0
2625 beqz $t2, .Lstring_compareto_length_diff # if $t2==0
2626 nop # return (this.length() - anotherString.length())
2627
2628#if (STRING_COMPRESSION_FEATURE)
2629    # Handle the different compression cases:
2630 sll $t3, $t0, 31
2631 beqz $t3, .Lstring_compareto_this_is_compressed
2632 sll $t3, $t1, 31 # In branch delay slot.
2633 beqz $t3, .Lstring_compareto_that_is_compressed
2634 nop
2635 b .Lstring_compareto_both_not_compressed
2636 nop
2637
2638.Lstring_compareto_this_is_compressed:
2639 beqz $t3, .Lstring_compareto_both_compressed
2640 nop
2641    /* Case: this->IsCompressed() && !that->IsCompressed() */
2642.Lstring_compareto_loop_comparison_this_compressed:
2643 lbu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
2644 lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
2645 bne $t0, $t1, .Lstring_compareto_char_diff
2646 addiu $a0, $a0, 1 # point at this.charAt(i++) - compressed
2647 subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
2648 bnez $t2, .Lstring_compareto_loop_comparison_this_compressed
2649 addiu $a1, $a1, 2 # point at anotherString.charAt(i++) - uncompressed
2650 jalr $zero, $ra
2651 subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
2652
2653.Lstring_compareto_that_is_compressed:
2654 lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
2655 lbu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
2656 bne $t0, $t1, .Lstring_compareto_char_diff
2657 addiu $a0, $a0, 2 # point at this.charAt(i++) - uncompressed
2658 subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
2659 bnez $t2, .Lstring_compareto_that_is_compressed
2660 addiu $a1, $a1, 1 # point at anotherString.charAt(i++) - compressed
2661 jalr $zero, $ra
2662 subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
2663
2664.Lstring_compareto_both_compressed:
2665 lbu $t0, MIRROR_STRING_VALUE_OFFSET($a0)
2666 lbu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
2667 bne $t0, $t1, .Lstring_compareto_char_diff
2668 addiu $a0, $a0, 1 # point at this.charAt(i++) - compressed
2669 subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
2670 bnez $t2, .Lstring_compareto_both_compressed
2671 addiu $a1, $a1, 1 # point at anotherString.charAt(i++) - compressed
2672 jalr $zero, $ra
2673 subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
2674#endif
2675
2676.Lstring_compareto_both_not_compressed:
2677 lhu $t0, MIRROR_STRING_VALUE_OFFSET($a0) # while this.charAt(i) == anotherString.charAt(i)
2678 lhu $t1, MIRROR_STRING_VALUE_OFFSET($a1)
2679 bne $t0, $t1, .Lstring_compareto_char_diff # if this.charAt(i) != anotherString.charAt(i)
2680 # return (this.charAt(i) - anotherString.charAt(i))
2681 addiu $a0, $a0, 2 # point at this.charAt(i++)
2682 subu $t2, $t2, 1 # new value of min(this.length(),anotherString.length())-i
2683 bnez $t2, .Lstring_compareto_both_not_compressed
2684 addiu $a1, $a1, 2 # point at anotherString.charAt(i++)
2685
2686.Lstring_compareto_length_diff:
2687 jalr $zero, $ra
2688 subu $v0, $a2, $a3 # return (this.length() - anotherString.length())
2689
2690.Lstring_compareto_char_diff:
2691 jalr $zero, $ra
2692 subu $v0, $t0, $t1 # return (this.charAt(i) - anotherString.charAt(i))
Chris Larsencf283da2016-01-19 16:45:35 -08002693END art_quick_string_compareto
Orion Hodsonac141392017-01-13 11:53:47 +00002694
Alexey Frunze15958152017-02-09 19:08:30 -08002695 /*
2696 * Create a function `name` calling the ReadBarrier::Mark routine,
2697 * getting its argument and returning its result through register
2698 * `reg`, saving and restoring all caller-save registers.
2699 */
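    /*
     * Informal sketch of the fast paths implemented below (the slow path calls artReadBarrierMark
     * with all caller-save registers preserved around the call):
     *
     *   if (ref == null) return ref;
     *   lock_word = *(uint32_t*)((char*)ref + MIRROR_OBJECT_LOCK_WORD_OFFSET);
     *   if (lock_word & (1u << LOCK_WORD_MARK_BIT_SHIFT)) return ref;    // already marked
     *   if (lock word state == forwarding address)
     *     return lock_word << LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT;  // forwarded object
     *   return artReadBarrierMark(ref);
     */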
2700.macro READ_BARRIER_MARK_REG name, reg
2701ENTRY \name
Alexey Frunzea3cb1322017-05-01 18:49:46 -07002702 // Null check so that we can load the lock word.
2703 bnez \reg, .Lnot_null_\name
2704 nop
2705.Lret_rb_\name:
2706 jalr $zero, $ra
2707 nop
2708.Lnot_null_\name:
2709 // Check lock word for mark bit, if marked return.
2710 lw $t9, MIRROR_OBJECT_LOCK_WORD_OFFSET(\reg)
2711 .set push
2712 .set noat
2713 sll $at, $t9, 31 - LOCK_WORD_MARK_BIT_SHIFT # Move mark bit to sign bit.
2714 bltz $at, .Lret_rb_\name
2715#if (LOCK_WORD_STATE_SHIFT != 30) || (LOCK_WORD_STATE_FORWARDING_ADDRESS != 3)
2716 // The below code depends on the lock word state being in the highest bits
2717 // and the "forwarding address" state having all bits set.
2718#error "Unexpected lock word state shift or forwarding address state value."
2719#endif
2720 // Test that both the forwarding state bits are 1.
2721 sll $at, $t9, 1
2722 and $at, $at, $t9 # Sign bit = 1 IFF both bits are 1.
2723 bltz $at, .Lret_forwarding_address\name
2724 nop
2725 .set pop
2726
2727 addiu $sp, $sp, -160 # Includes 16 bytes of space for argument registers a0-a3.
Alexey Frunze15958152017-02-09 19:08:30 -08002728 .cfi_adjust_cfa_offset 160
2729
2730 sw $ra, 156($sp)
2731 .cfi_rel_offset 31, 156
2732 sw $t8, 152($sp)
2733 .cfi_rel_offset 24, 152
2734 sw $t7, 148($sp)
2735 .cfi_rel_offset 15, 148
2736 sw $t6, 144($sp)
2737 .cfi_rel_offset 14, 144
2738 sw $t5, 140($sp)
2739 .cfi_rel_offset 13, 140
2740 sw $t4, 136($sp)
2741 .cfi_rel_offset 12, 136
2742 sw $t3, 132($sp)
2743 .cfi_rel_offset 11, 132
2744 sw $t2, 128($sp)
2745 .cfi_rel_offset 10, 128
2746 sw $t1, 124($sp)
2747 .cfi_rel_offset 9, 124
2748 sw $t0, 120($sp)
2749 .cfi_rel_offset 8, 120
2750 sw $a3, 116($sp)
2751 .cfi_rel_offset 7, 116
2752 sw $a2, 112($sp)
2753 .cfi_rel_offset 6, 112
2754 sw $a1, 108($sp)
2755 .cfi_rel_offset 5, 108
2756 sw $a0, 104($sp)
2757 .cfi_rel_offset 4, 104
2758 sw $v1, 100($sp)
2759 .cfi_rel_offset 3, 100
2760 sw $v0, 96($sp)
2761 .cfi_rel_offset 2, 96
2762
2763 la $t9, artReadBarrierMark
2764
2765 sdc1 $f18, 88($sp)
2766 sdc1 $f16, 80($sp)
2767 sdc1 $f14, 72($sp)
2768 sdc1 $f12, 64($sp)
2769 sdc1 $f10, 56($sp)
2770 sdc1 $f8, 48($sp)
2771 sdc1 $f6, 40($sp)
2772 sdc1 $f4, 32($sp)
2773 sdc1 $f2, 24($sp)
2774
2775 .ifnc \reg, $a0
2776 move $a0, \reg # pass obj from `reg` in a0
2777 .endif
2778 jalr $t9 # v0 <- artReadBarrierMark(obj)
2779 sdc1 $f0, 16($sp) # in delay slot
2780
2781 lw $ra, 156($sp)
2782 .cfi_restore 31
2783 lw $t8, 152($sp)
2784 .cfi_restore 24
2785 lw $t7, 148($sp)
2786 .cfi_restore 15
2787 lw $t6, 144($sp)
2788 .cfi_restore 14
2789 lw $t5, 140($sp)
2790 .cfi_restore 13
2791 lw $t4, 136($sp)
2792 .cfi_restore 12
2793 lw $t3, 132($sp)
2794 .cfi_restore 11
2795 lw $t2, 128($sp)
2796 .cfi_restore 10
2797 lw $t1, 124($sp)
2798 .cfi_restore 9
2799 lw $t0, 120($sp)
2800 .cfi_restore 8
2801 lw $a3, 116($sp)
2802 .cfi_restore 7
2803 lw $a2, 112($sp)
2804 .cfi_restore 6
2805 lw $a1, 108($sp)
2806 .cfi_restore 5
2807 lw $a0, 104($sp)
2808 .cfi_restore 4
2809 lw $v1, 100($sp)
2810 .cfi_restore 3
2811
2812 .ifnc \reg, $v0
2813 move \reg, $v0 # `reg` <- v0
2814 lw $v0, 96($sp)
2815 .cfi_restore 2
2816 .endif
2817
2818 ldc1 $f18, 88($sp)
2819 ldc1 $f16, 80($sp)
2820 ldc1 $f14, 72($sp)
2821 ldc1 $f12, 64($sp)
2822 ldc1 $f10, 56($sp)
2823 ldc1 $f8, 48($sp)
2824 ldc1 $f6, 40($sp)
2825 ldc1 $f4, 32($sp)
2826 ldc1 $f2, 24($sp)
2827 ldc1 $f0, 16($sp)
2828
2829 jalr $zero, $ra
2830 addiu $sp, $sp, 160
2831 .cfi_adjust_cfa_offset -160
Alexey Frunzea3cb1322017-05-01 18:49:46 -07002832
2833.Lret_forwarding_address\name:
2834 jalr $zero, $ra
2835 // Shift left by the forwarding address shift. This clears out the state bits since they are
2836 // in the top 2 bits of the lock word.
2837 sll \reg, $t9, LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT
Alexey Frunze15958152017-02-09 19:08:30 -08002838END \name
2839.endm
2840
2841// Note that art_quick_read_barrier_mark_regXX corresponds to register XX+1.
2842// ZERO (register 0) is reserved.
2843// AT (register 1) is reserved as a temporary/scratch register.
2844READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, $v0
2845READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, $v1
2846READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, $a0
2847READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg04, $a1
2848READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, $a2
2849READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, $a3
2850READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, $t0
2851READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg08, $t1
2852READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg09, $t2
2853READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg10, $t3
2854READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg11, $t4
2855READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg12, $t5
2856READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg13, $t6
2857READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg14, $t7
2858// S0 and S1 (registers 16 and 17) are reserved as suspended and thread registers.
2859READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg17, $s2
2860READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg18, $s3
2861READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg19, $s4
2862READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg20, $s5
2863READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg21, $s6
2864READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg22, $s7
2865// T8 and T9 (registers 24 and 25) are reserved as temporary/scratch registers.
2866// K0, K1, GP, SP (registers 26 - 29) are reserved.
2867READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg29, $s8
2868// RA (register 31) is reserved.
2869
Alexey Frunze4147fcc2017-06-17 19:57:27 -07002870// Caller code:
2871// Short constant offset/index:
2872// R2: | R6:
2873// lw $t9, pReadBarrierMarkReg00
2874// beqz $t9, skip_call | beqzc $t9, skip_call
2875// addiu $t9, $t9, thunk_disp | nop
2876// jalr $t9 | jialc $t9, thunk_disp
2877// nop |
2878// skip_call: | skip_call:
2879// lw `out`, ofs(`obj`) | lw `out`, ofs(`obj`)
2880// [subu `out`, $zero, `out`] | [subu `out`, $zero, `out`] # Unpoison reference.
2881.macro BRB_FIELD_SHORT_OFFSET_ENTRY obj
28821:
2883 # Explicit null check. May be redundant (for array elements or when the field
2884 # offset is larger than the page size, 4KB).
2885 # $ra will be adjusted to point to lw's stack map when throwing NPE.
2886 beqz \obj, .Lintrospection_throw_npe
2887#if defined(_MIPS_ARCH_MIPS32R6)
2888 lapc $gp, .Lintrospection_exits # $gp = address of .Lintrospection_exits.
2889#else
2890 addiu $gp, $t9, (.Lintrospection_exits - 1b) # $gp = address of .Lintrospection_exits.
2891#endif
2892 .set push
2893 .set noat
2894 lw $at, MIRROR_OBJECT_LOCK_WORD_OFFSET(\obj)
2895 sll $at, $at, 31 - LOCK_WORD_READ_BARRIER_STATE_SHIFT # Move barrier state bit
2896 # to sign bit.
2897 bltz $at, .Lintrospection_field_array # If gray, load reference, mark.
2898 move $t8, \obj # Move `obj` to $t8 for common code.
2899 .set pop
2900 jalr $zero, $ra # Otherwise, load-load barrier and return.
2901 sync
2902.endm
2903
2904// Caller code (R2):
2905// Long constant offset/index: | Variable index:
2906// lw $t9, pReadBarrierMarkReg00
2907// lui $t8, ofs_hi | sll $t8, `index`, 2
2908// beqz $t9, skip_call | beqz $t9, skip_call
2909// addiu $t9, $t9, thunk_disp | addiu $t9, $t9, thunk_disp
2910// jalr $t9 | jalr $t9
2911// skip_call: | skip_call:
2912// addu $t8, $t8, `obj` | addu $t8, $t8, `obj`
2913// lw `out`, ofs_lo($t8) | lw `out`, ofs($t8)
2914// [subu `out`, $zero, `out`] | [subu `out`, $zero, `out`] # Unpoison reference.
2915//
2916// Caller code (R6):
2917// Long constant offset/index: | Variable index:
2918// lw $t9, pReadBarrierMarkReg00
2919// beqz $t9, skip_call | beqz $t9, skip_call
2920// aui $t8, `obj`, ofs_hi | lsa $t8, `index`, `obj`, 2
2921// jialc $t9, thunk_disp | jialc $t9, thunk_disp
2922// skip_call: | skip_call:
2923// lw `out`, ofs_lo($t8) | lw `out`, ofs($t8)
2924// [subu `out`, $zero, `out`] | [subu `out`, $zero, `out`] # Unpoison reference.
2925.macro BRB_FIELD_LONG_OFFSET_ENTRY obj
29261:
2927 # No explicit null check for variable indices or large constant indices/offsets
2928 # as it must have been done earlier.
2929#if defined(_MIPS_ARCH_MIPS32R6)
2930 lapc $gp, .Lintrospection_exits # $gp = address of .Lintrospection_exits.
2931#else
2932 addiu $gp, $t9, (.Lintrospection_exits - 1b) # $gp = address of .Lintrospection_exits.
2933#endif
2934 .set push
2935 .set noat
2936 lw $at, MIRROR_OBJECT_LOCK_WORD_OFFSET(\obj)
2937 sll $at, $at, 31 - LOCK_WORD_READ_BARRIER_STATE_SHIFT # Move barrier state bit
2938 # to sign bit.
2939 bltz $at, .Lintrospection_field_array # If gray, load reference, mark.
2940 nop
2941 .set pop
2942 jalr $zero, $ra # Otherwise, load-load barrier and return.
2943 sync
2944 break # Padding to 8 instructions.
2945.endm
2946
2947.macro BRB_GC_ROOT_ENTRY root
29481:
2949#if defined(_MIPS_ARCH_MIPS32R6)
2950 lapc $gp, .Lintrospection_exit_\root # $gp = exit point address.
2951#else
2952 addiu $gp, $t9, (.Lintrospection_exit_\root - 1b) # $gp = exit point address.
2953#endif
2954 bnez \root, .Lintrospection_common
2955 move $t8, \root # Move reference to $t8 for common code.
2956 jalr $zero, $ra # Return if null.
2957 # The next instruction (from the following BRB_GC_ROOT_ENTRY) fills the delay slot.
2958 # This instruction has no effect (actual NOP for the last entry; otherwise changes $gp,
2959 # which is unused after that anyway).
2960.endm
2961
2962.macro BRB_FIELD_EXIT out
2963.Lintrospection_exit_\out:
2964 jalr $zero, $ra
2965 move \out, $t8 # Return reference in expected register.
2966.endm
2967
2968.macro BRB_FIELD_EXIT_BREAK
2969 break
2970 break
2971.endm
2972
2973ENTRY_NO_GP art_quick_read_barrier_mark_introspection
2974 # Entry points for offsets/indices not fitting into int16_t and for variable indices.
2975 BRB_FIELD_LONG_OFFSET_ENTRY $v0
2976 BRB_FIELD_LONG_OFFSET_ENTRY $v1
2977 BRB_FIELD_LONG_OFFSET_ENTRY $a0
2978 BRB_FIELD_LONG_OFFSET_ENTRY $a1
2979 BRB_FIELD_LONG_OFFSET_ENTRY $a2
2980 BRB_FIELD_LONG_OFFSET_ENTRY $a3
2981 BRB_FIELD_LONG_OFFSET_ENTRY $t0
2982 BRB_FIELD_LONG_OFFSET_ENTRY $t1
2983 BRB_FIELD_LONG_OFFSET_ENTRY $t2
2984 BRB_FIELD_LONG_OFFSET_ENTRY $t3
2985 BRB_FIELD_LONG_OFFSET_ENTRY $t4
2986 BRB_FIELD_LONG_OFFSET_ENTRY $t5
2987 BRB_FIELD_LONG_OFFSET_ENTRY $t6
2988 BRB_FIELD_LONG_OFFSET_ENTRY $t7
2989 BRB_FIELD_LONG_OFFSET_ENTRY $s2
2990 BRB_FIELD_LONG_OFFSET_ENTRY $s3
2991 BRB_FIELD_LONG_OFFSET_ENTRY $s4
2992 BRB_FIELD_LONG_OFFSET_ENTRY $s5
2993 BRB_FIELD_LONG_OFFSET_ENTRY $s6
2994 BRB_FIELD_LONG_OFFSET_ENTRY $s7
2995 BRB_FIELD_LONG_OFFSET_ENTRY $s8
2996
2997 # Entry points for offsets/indices fitting into int16_t.
2998 BRB_FIELD_SHORT_OFFSET_ENTRY $v0
2999 BRB_FIELD_SHORT_OFFSET_ENTRY $v1
3000 BRB_FIELD_SHORT_OFFSET_ENTRY $a0
3001 BRB_FIELD_SHORT_OFFSET_ENTRY $a1
3002 BRB_FIELD_SHORT_OFFSET_ENTRY $a2
3003 BRB_FIELD_SHORT_OFFSET_ENTRY $a3
3004 BRB_FIELD_SHORT_OFFSET_ENTRY $t0
3005 BRB_FIELD_SHORT_OFFSET_ENTRY $t1
3006 BRB_FIELD_SHORT_OFFSET_ENTRY $t2
3007 BRB_FIELD_SHORT_OFFSET_ENTRY $t3
3008 BRB_FIELD_SHORT_OFFSET_ENTRY $t4
3009 BRB_FIELD_SHORT_OFFSET_ENTRY $t5
3010 BRB_FIELD_SHORT_OFFSET_ENTRY $t6
3011 BRB_FIELD_SHORT_OFFSET_ENTRY $t7
3012 BRB_FIELD_SHORT_OFFSET_ENTRY $s2
3013 BRB_FIELD_SHORT_OFFSET_ENTRY $s3
3014 BRB_FIELD_SHORT_OFFSET_ENTRY $s4
3015 BRB_FIELD_SHORT_OFFSET_ENTRY $s5
3016 BRB_FIELD_SHORT_OFFSET_ENTRY $s6
3017 BRB_FIELD_SHORT_OFFSET_ENTRY $s7
3018 BRB_FIELD_SHORT_OFFSET_ENTRY $s8
3019
3020 .global art_quick_read_barrier_mark_introspection_gc_roots
3021art_quick_read_barrier_mark_introspection_gc_roots:
3022 # Entry points for GC roots.
3023 BRB_GC_ROOT_ENTRY $v0
3024 BRB_GC_ROOT_ENTRY $v1
3025 BRB_GC_ROOT_ENTRY $a0
3026 BRB_GC_ROOT_ENTRY $a1
3027 BRB_GC_ROOT_ENTRY $a2
3028 BRB_GC_ROOT_ENTRY $a3
3029 BRB_GC_ROOT_ENTRY $t0
3030 BRB_GC_ROOT_ENTRY $t1
3031 BRB_GC_ROOT_ENTRY $t2
3032 BRB_GC_ROOT_ENTRY $t3
3033 BRB_GC_ROOT_ENTRY $t4
3034 BRB_GC_ROOT_ENTRY $t5
3035 BRB_GC_ROOT_ENTRY $t6
3036 BRB_GC_ROOT_ENTRY $t7
3037 BRB_GC_ROOT_ENTRY $s2
3038 BRB_GC_ROOT_ENTRY $s3
3039 BRB_GC_ROOT_ENTRY $s4
3040 BRB_GC_ROOT_ENTRY $s5
3041 BRB_GC_ROOT_ENTRY $s6
3042 BRB_GC_ROOT_ENTRY $s7
3043 BRB_GC_ROOT_ENTRY $s8
3044 .global art_quick_read_barrier_mark_introspection_end_of_entries
3045art_quick_read_barrier_mark_introspection_end_of_entries:
3046 nop # Fill the delay slot of the last BRB_GC_ROOT_ENTRY.
3047
3048.Lintrospection_throw_npe:
3049 b art_quick_throw_null_pointer_exception
3050 addiu $ra, $ra, 4 # Skip lw, make $ra point to lw's stack map.
3051
3052 .set push
3053 .set noat
3054
3055 // Fields and array elements.
3056
3057.Lintrospection_field_array:
3058 // Get the field/element address using $t8 and the offset from the lw instruction.
3059 lh $at, 0($ra) # $ra points to lw: $at = field/element offset.
3060 addiu $ra, $ra, 4 + HEAP_POISON_INSTR_SIZE # Skip lw(+subu).
3061 addu $t8, $t8, $at # $t8 = field/element address.
3062
3063 // Calculate the address of the exit point, store it in $gp and load the reference into $t8.
3064 lb $at, (-HEAP_POISON_INSTR_SIZE - 2)($ra) # $ra-HEAP_POISON_INSTR_SIZE-4 points to
3065 # "lw `out`, ...".
3066 andi $at, $at, 31 # Extract `out` from lw.
3067 sll $at, $at, 3 # Multiply `out` by the exit point size (BRB_FIELD_EXIT* macros).
3068
3069 lw $t8, 0($t8) # $t8 = reference.
3070 UNPOISON_HEAP_REF $t8
3071
3072 // Return if null reference.
3073 bnez $t8, .Lintrospection_common
3074 addu $gp, $gp, $at # $gp = address of the exit point.
3075
3076 // Early return through the exit point.
3077.Lintrospection_return_early:
3078 jalr $zero, $gp # Move $t8 to `out` and return.
3079 nop
3080
3081 // Code common for GC roots, fields and array elements.
3082
3083.Lintrospection_common:
3084 // Check lock word for mark bit, if marked return.
3085 lw $t9, MIRROR_OBJECT_LOCK_WORD_OFFSET($t8)
3086 sll $at, $t9, 31 - LOCK_WORD_MARK_BIT_SHIFT # Move mark bit to sign bit.
3087 bltz $at, .Lintrospection_return_early
3088#if (LOCK_WORD_STATE_SHIFT != 30) || (LOCK_WORD_STATE_FORWARDING_ADDRESS != 3)
3089 // The below code depends on the lock word state being in the highest bits
3090 // and the "forwarding address" state having all bits set.
3091#error "Unexpected lock word state shift or forwarding address state value."
3092#endif
3093 // Test that both the forwarding state bits are 1.
3094 sll $at, $t9, 1
3095 and $at, $at, $t9 # Sign bit = 1 IFF both bits are 1.
3096 bgez $at, .Lintrospection_mark
3097 nop
3098
3099 .set pop
3100
3101 // Shift left by the forwarding address shift. This clears out the state bits since they are
3102 // in the top 2 bits of the lock word.
3103 jalr $zero, $gp # Move $t8 to `out` and return.
3104 sll $t8, $t9, LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT
3105
3106.Lintrospection_mark:
3107 // Partially set up the stack frame preserving only $ra.
3108 addiu $sp, $sp, -160 # Includes 16 bytes of space for argument registers $a0-$a3.
3109 .cfi_adjust_cfa_offset 160
3110 sw $ra, 156($sp)
3111 .cfi_rel_offset 31, 156
3112
3113 // Set up $gp, clobbering $ra and using the branch delay slot for a useful instruction.
3114 bal 1f
3115 sw $gp, 152($sp) # Preserve the exit point address.
31161:
3117 .cpload $ra
3118
3119 // Finalize the stack frame and call.
3120 sw $t7, 148($sp)
3121 .cfi_rel_offset 15, 148
3122 sw $t6, 144($sp)
3123 .cfi_rel_offset 14, 144
3124 sw $t5, 140($sp)
3125 .cfi_rel_offset 13, 140
3126 sw $t4, 136($sp)
3127 .cfi_rel_offset 12, 136
3128 sw $t3, 132($sp)
3129 .cfi_rel_offset 11, 132
3130 sw $t2, 128($sp)
3131 .cfi_rel_offset 10, 128
3132 sw $t1, 124($sp)
3133 .cfi_rel_offset 9, 124
3134 sw $t0, 120($sp)
3135 .cfi_rel_offset 8, 120
3136 sw $a3, 116($sp)
3137 .cfi_rel_offset 7, 116
3138 sw $a2, 112($sp)
3139 .cfi_rel_offset 6, 112
3140 sw $a1, 108($sp)
3141 .cfi_rel_offset 5, 108
3142 sw $a0, 104($sp)
3143 .cfi_rel_offset 4, 104
3144 sw $v1, 100($sp)
3145 .cfi_rel_offset 3, 100
3146 sw $v0, 96($sp)
3147 .cfi_rel_offset 2, 96
3148
3149 la $t9, artReadBarrierMark
3150
3151 sdc1 $f18, 88($sp)
3152 sdc1 $f16, 80($sp)
3153 sdc1 $f14, 72($sp)
3154 sdc1 $f12, 64($sp)
3155 sdc1 $f10, 56($sp)
3156 sdc1 $f8, 48($sp)
3157 sdc1 $f6, 40($sp)
3158 sdc1 $f4, 32($sp)
3159 sdc1 $f2, 24($sp)
3160 sdc1 $f0, 16($sp)
3161
3162 jalr $t9 # $v0 <- artReadBarrierMark(reference)
3163 move $a0, $t8 # Pass reference in $a0.
3164 move $t8, $v0
3165
3166 lw $ra, 156($sp)
3167 .cfi_restore 31
3168 lw $gp, 152($sp) # $gp = address of the exit point.
3169 lw $t7, 148($sp)
3170 .cfi_restore 15
3171 lw $t6, 144($sp)
3172 .cfi_restore 14
3173 lw $t5, 140($sp)
3174 .cfi_restore 13
3175 lw $t4, 136($sp)
3176 .cfi_restore 12
3177 lw $t3, 132($sp)
3178 .cfi_restore 11
3179 lw $t2, 128($sp)
3180 .cfi_restore 10
3181 lw $t1, 124($sp)
3182 .cfi_restore 9
3183 lw $t0, 120($sp)
3184 .cfi_restore 8
3185 lw $a3, 116($sp)
3186 .cfi_restore 7
3187 lw $a2, 112($sp)
3188 .cfi_restore 6
3189 lw $a1, 108($sp)
3190 .cfi_restore 5
3191 lw $a0, 104($sp)
3192 .cfi_restore 4
3193 lw $v1, 100($sp)
3194 .cfi_restore 3
3195 lw $v0, 96($sp)
3196 .cfi_restore 2
3197
3198 ldc1 $f18, 88($sp)
3199 ldc1 $f16, 80($sp)
3200 ldc1 $f14, 72($sp)
3201 ldc1 $f12, 64($sp)
3202 ldc1 $f10, 56($sp)
3203 ldc1 $f8, 48($sp)
3204 ldc1 $f6, 40($sp)
3205 ldc1 $f4, 32($sp)
3206 ldc1 $f2, 24($sp)
3207 ldc1 $f0, 16($sp)
3208
3209 // Return through the exit point.
3210 jalr $zero, $gp # Move $t8 to `out` and return.
3211 addiu $sp, $sp, 160
3212 .cfi_adjust_cfa_offset -160
3213
3214.Lintrospection_exits:
3215 BRB_FIELD_EXIT_BREAK
3216 BRB_FIELD_EXIT_BREAK
3217 BRB_FIELD_EXIT $v0
3218 BRB_FIELD_EXIT $v1
3219 BRB_FIELD_EXIT $a0
3220 BRB_FIELD_EXIT $a1
3221 BRB_FIELD_EXIT $a2
3222 BRB_FIELD_EXIT $a3
3223 BRB_FIELD_EXIT $t0
3224 BRB_FIELD_EXIT $t1
3225 BRB_FIELD_EXIT $t2
3226 BRB_FIELD_EXIT $t3
3227 BRB_FIELD_EXIT $t4
3228 BRB_FIELD_EXIT $t5
3229 BRB_FIELD_EXIT $t6
3230 BRB_FIELD_EXIT $t7
3231 BRB_FIELD_EXIT_BREAK
3232 BRB_FIELD_EXIT_BREAK
3233 BRB_FIELD_EXIT $s2
3234 BRB_FIELD_EXIT $s3
3235 BRB_FIELD_EXIT $s4
3236 BRB_FIELD_EXIT $s5
3237 BRB_FIELD_EXIT $s6
3238 BRB_FIELD_EXIT $s7
3239 BRB_FIELD_EXIT_BREAK
3240 BRB_FIELD_EXIT_BREAK
3241 BRB_FIELD_EXIT_BREAK
3242 BRB_FIELD_EXIT_BREAK
3243 BRB_FIELD_EXIT_BREAK
3244 BRB_FIELD_EXIT_BREAK
3245 BRB_FIELD_EXIT $s8
3246 BRB_FIELD_EXIT_BREAK
3247END art_quick_read_barrier_mark_introspection
3248
Orion Hodsoncd260eb2018-06-06 09:04:17 +01003249 /*
3250 * Polymorphic method invocation.
3251 * On entry:
3252 * a0 = unused
3253 * a1 = receiver
3254 */
Orion Hodsonac141392017-01-13 11:53:47 +00003255.extern artInvokePolymorphic
3256ENTRY art_quick_invoke_polymorphic
3257 SETUP_SAVE_REFS_AND_ARGS_FRAME
Orion Hodsoncd260eb2018-06-06 09:04:17 +01003258 move $a0, $a1 # Make $a0 the receiver.
3259 move $a1, rSELF # Make $a1 an alias for the current Thread.
3260 la $t9, artInvokePolymorphic # Invoke artInvokePolymorphic
3261 jalr $t9 # with args (receiver, Thread*, context).
3262 addiu $a2, $sp, ARG_SLOT_SIZE # Make $a2 a pointer to the saved frame context.
3263 lw $t7, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_
Orion Hodsonac141392017-01-13 11:53:47 +00003264 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Orion Hodsoncd260eb2018-06-06 09:04:17 +01003265 bnez $t7, 1f
3266    # We don't care if $v0 and/or $v1 are modified when the exception branch is taken.
3267 MTD $v0, $v1, $f0, $f1 # move float value to return value
3268 jalr $zero, $ra
Orion Hodsonac141392017-01-13 11:53:47 +00003269 nop
32701:
3271 DELIVER_PENDING_EXCEPTION
3272END art_quick_invoke_polymorphic