/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"

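// Register aliases used throughout this file (defined in asm_support_arm64.S; the
// mappings below are inferred from the .cfi_rel_offset directives that accompany them):
//   xSELF (x18)    - Thread* of the current thread.
//   wSUSPEND (w19) - suspend-check countdown, reloaded from SUSPEND_CHECK_INTERVAL.
//   xFP (x29), xLR (x30) - frame pointer and link register.
//   xIP0           - scratch register (conventionally IP0 = x16 in AAPCS64).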

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]
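    // The adrp/:got: + ldr/:got_lo12: pair is the standard AArch64 two-instruction
    // GOT access: adrp forms the 4KiB page address of the GOT slot, and the ldr
    // then loads the address of Runtime::instance_ from that slot.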

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime::instance_ (an art::Runtime*).

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll].
    ldr x9, [x9, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #368
    .cfi_adjust_cfa_offset 368

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 368)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP args.
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // FP callee-saves.
    stp d8, d9, [sp, #72]
    stp d10, d11, [sp, #88]
    stp d12, d13, [sp, #104]
    stp d14, d15, [sp, #120]

    stp d16, d17, [sp, #136]
    stp d18, d19, [sp, #152]
    stp d20, d21, [sp, #168]
    stp d22, d23, [sp, #184]
    stp d24, d25, [sp, #200]
    stp d26, d27, [sp, #216]
    stp d28, d29, [sp, #232]
    stp d30, d31, [sp, #248]


    // Callee saved.
    stp xSELF, x19, [sp, #264]
    .cfi_rel_offset x18, 264
    .cfi_rel_offset x19, 272

    stp x20, x21, [sp, #280]
    .cfi_rel_offset x20, 280
    .cfi_rel_offset x21, 288

    stp x22, x23, [sp, #296]
    .cfi_rel_offset x22, 296
    .cfi_rel_offset x23, 304

    stp x24, x25, [sp, #312]
    .cfi_rel_offset x24, 312
    .cfi_rel_offset x25, 320

    stp x26, x27, [sp, #328]
    .cfi_rel_offset x26, 328
    .cfi_rel_offset x27, 336

    stp x28, xFP, [sp, #344]    // Save FP.
    .cfi_rel_offset x28, 344
    .cfi_rel_offset x29, 352

    str xLR, [sp, #360]
    .cfi_rel_offset x30, 360

    // Store the appropriate callee-save method.
    str x9, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].

.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
// WIP.
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime::instance_ (an art::Runtime*).

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly].
    ldr x9, [x9, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 176)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP callee-saves.
    stp d8, d9, [sp, #8]
    stp d10, d11, [sp, #24]
    stp d12, d13, [sp, #40]
    stp d14, d15, [sp, #56]

    // Callee saved.
    stp xSELF, x19, [sp, #72]
    .cfi_rel_offset x18, 72
    .cfi_rel_offset x19, 80

    stp x20, x21, [sp, #88]
    .cfi_rel_offset x20, 88
    .cfi_rel_offset x21, 96

    stp x22, x23, [sp, #104]
    .cfi_rel_offset x22, 104
    .cfi_rel_offset x23, 112

    stp x24, x25, [sp, #120]
    .cfi_rel_offset x24, 120
    .cfi_rel_offset x25, 128

    stp x26, x27, [sp, #136]
    .cfi_rel_offset x26, 136
    .cfi_rel_offset x27, 144

    stp x28, xFP, [sp, #152]    // Save FP.
    .cfi_rel_offset x28, 152
    .cfi_rel_offset x29, 160

    str xLR, [sp, #168]
    .cfi_rel_offset x30, 168

    // Store the appropriate callee-save method.
    str x9, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly].
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    // FP callee-saves.
    ldp d8, d9, [sp, #8]
    ldp d10, d11, [sp, #24]
    ldp d12, d13, [sp, #40]
    ldp d14, d15, [sp, #56]

    // Callee saved.
    ldp xSELF, x19, [sp, #72]
    .cfi_restore x18
    .cfi_restore x19

    ldp x20, x21, [sp, #88]
    .cfi_restore x20
    .cfi_restore x21

    ldp x22, x23, [sp, #104]
    .cfi_restore x22
    .cfi_restore x23

    ldp x24, x25, [sp, #120]
    .cfi_restore x24
    .cfi_restore x25

    ldp x26, x27, [sp, #136]
    .cfi_restore x26
    .cfi_restore x27

    ldp x28, xFP, [sp, #152]    // Restore FP.
    .cfi_restore x28
    .cfi_restore x29

    ldr xLR, [sp, #168]
    .cfi_restore x30

    add sp, sp, #176
    .cfi_adjust_cfa_offset -176
.endm

.macro POP_REF_ONLY_CALLEE_SAVE_FRAME
    add sp, sp, #176
    .cfi_adjust_cfa_offset -176
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #304
    .cfi_adjust_cfa_offset 304

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 304)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]
    stp d8, d9, [sp, #80]
    stp d10, d11, [sp, #96]
    stp d12, d13, [sp, #112]
    stp d14, d15, [sp, #128]

    stp x1, x2, [sp, #144]
    .cfi_rel_offset x1, 144
    .cfi_rel_offset x2, 152

    stp x3, x4, [sp, #160]
    .cfi_rel_offset x3, 160
    .cfi_rel_offset x4, 168

    stp x5, x6, [sp, #176]
    .cfi_rel_offset x5, 176
    .cfi_rel_offset x6, 184

    stp x7, xSELF, [sp, #192]
    .cfi_rel_offset x7, 192
    .cfi_rel_offset x18, 200

    stp x19, x20, [sp, #208]
    .cfi_rel_offset x19, 208
    .cfi_rel_offset x20, 216

    stp x21, x22, [sp, #224]
    .cfi_rel_offset x21, 224
    .cfi_rel_offset x22, 232

    stp x23, x24, [sp, #240]
    .cfi_rel_offset x23, 240
    .cfi_rel_offset x24, 248

    stp x25, x26, [sp, #256]
    .cfi_rel_offset x25, 256
    .cfi_rel_offset x26, 264

    stp x27, x28, [sp, #272]
    .cfi_rel_offset x27, 272
    .cfi_rel_offset x28, 280

    stp xFP, xLR, [sp, #288]
    .cfi_rel_offset x29, 288
    .cfi_rel_offset x30, 296
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime::instance_ (an art::Runtime*).

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs].
    ldr x9, [x9, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str x9, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f                 // result non-zero branch over
    ret                         // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f                  // result zero branch over
    ret                         // return
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode    // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                                   // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, #THREAD_EXCEPTION_OFFSET]   // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x9
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f                 // result non-zero branch over
    ret                         // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x0, xSELF                       // pass Thread::Current
    mov x1, sp                          // pass SP
    b \cxx_name                         // \cxx_name(Thread*, SP)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context.
    mov x1, xSELF                       // pass Thread::Current.
    mov x2, sp                          // pass SP.
    b \cxx_name                         // \cxx_name(arg, Thread*, SP).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    b \cxx_name                         // \cxx_name(arg1, arg2, Thread*, SP)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return NULL/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers x12.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    ldr w2, [sp, #FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE]  // pass caller Method*
    mov x3, xSELF                       // pass Thread::Current
    mov x4, sp
    bl \cxx_name                        // (method_idx, this, caller, Thread*, SP)
    mov x12, x1                         // save Method*->code_
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                          // did we find the target? if not go to exception delivery
    br x12                              // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=6*8   // x4, x5, x19(wSUSPEND), SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+STACK_REFERENCE_SIZE


    mov x9, sp                          // Save stack pointer.
    .cfi_register sp,x9

    add x10, x2, #SAVE_SIZE_AND_METHOD  // Calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* + args.
    and x10, x10, # ~0xf                // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    stp x9, x19, [x10, #32]             // Save old stack pointer and x19(wSUSPEND).
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.
    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL  // Reset wSUSPEND to suspend check interval.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #4                      // Destination address is bottom of stack + null
                                        // StackReference<Method> slot.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4                      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into StackReference<Method>* at bottom of frame.
    str wzr, [sp]

#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(ARM64) size not as expected."
#endif
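    // Note: on 64-bit ART both stack references and heap references to objects are
    // compressed 32-bit values, which is why a StackReference slot is 4 bytes here
    // and why reference loads/stores elsewhere in this file use w-registers
    // (commented as "Heap reference = 32b").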
.endm

.macro INVOKE_STUB_CALL_AND_RETURN

    // Load the method's quick code entry point (METHOD_QUICK_CODE_OFFSET).
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldp x2, x19, [xFP, #32]             // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]                 // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |          Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |          Outs
 *  | uint32_t out[0]      |
 *  | StackRef<ArtMethod>  | <- SP    value=null
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  wSUSPEND = suspend count
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
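// Shorty format reminder (a sketch, not exhaustive): the first character is the
// return type and the rest are the parameter types, using JNI-style primitive
// codes ('I' int, 'J' long, 'F' float, 'D' double, 'V' void, 'L' reference).
// E.g. a method `double f(int, long)` has shorty "DIJ"; the fill loop below skips
// the leading 'D' (return type) and dispatches on 'I' then 'J'.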
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1         // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4        // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1     // Load next character in signature, and increment.
    cbz w17, .LcallFunction // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'           // is this a float?
    bne .LisDouble

    cmp x15, # 8*12         // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15       // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'           // is this a double?
    bne .LisLong

    cmp x15, # 8*12         // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15       // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'           // is this a long?
    bne .LisOther

    cmp x8, # 6*12          // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8        // Calculate subroutine to jump to.
    br x17

.LisOther:                  // Everything else takes one vReg.
    cmp x8, # 6*12          // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8        // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables.
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
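
// For illustration, "LOADREG x8 4 w2 .LfillRegisters" below expands to:
//     ldr w2, [x9], #4        // Pop a 4-byte arg into w2, advance the arg pointer.
//     add x8, x8, 12          // Each table entry is 12 bytes; step to the next one.
//     b .LfillRegisters
// i.e. each LOADREG instance is a 12-byte "subroutine" that the dispatch code above
// selects by adding the running offset in x8/x15 to the table base address.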

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1         // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1     // Load next character in signature, and increment.
    cbz w17, .LcallFunction2  // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'           // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12         // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15       // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'           // is this a double?
    bne .LisLong2

    cmp x15, # 8*12         // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15       // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'           // is this a long?
    bne .LisOther2

    cmp x8, # 7*12          // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8        // Calculate subroutine to jump to.
    br x17

.LisOther2:                 // Everything else takes one vReg.
    cmp x8, # 7*12          // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8        // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */
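    /*
     * Layout assumed below (inferred from the loads): gprs_ holds x0..x30 at 8-byte
     * stride with gprs_[31] holding the new SP, and fprs_ holds d0..d31. The target
     * PC is delivered in the restored x30/LR, hence the final "br xLR".
     */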

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure.
     */
    .extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // Save callee saves in case exception allocation triggers GC.
    mov x2, xSELF                       // Pass Thread::Current.
    mov x3, sp                          // Pass SP.
    bl artHandleFillArrayDataFromCode   // (Array*, const DexFile::Payload*, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_handle_fill_data

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
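    /*
     * Thin lock word layout assumed by the fast path below (see ART's LockWord):
     * the state lives in the top two bits (00 = thin/unlocked), the owner thread id
     * in the low 16 bits, and the recursive lock count in the bits above it - hence
     * the "lsr #30" state checks, the 16-bit uxth owner comparison, and the #65536
     * (1 << 16) count increment.
     */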
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
    cbz w0, .Lslow_lock
    add x4, x0, #LOCK_WORD_OFFSET   // exclusive load/store has no immediate offset form
.Lretry_lock:
    ldr w2, [xSELF, #THREAD_ID_OFFSET]  // TODO: Can the thread ID really change during the loop?
    ldxr w1, [x4]
    cbnz w1, .Lnot_unlocked         // already thin locked
    stxr w3, w2, [x4]
    cbnz w3, .Lstrex_fail           // store failed, retry
    dmb ishld                       // acquire (LoadLoad|LoadStore) memory barrier
    ret
.Lstrex_fail:
    b .Lretry_lock                  // unlikely forward branch, need to reload and recheck r1/r2
.Lnot_unlocked:
    lsr w3, w1, 30
    cbnz w3, .Lslow_lock            // if either of the top two bits are set, go slow path
    eor w2, w1, w2                  // lock_word.ThreadId() ^ self->ThreadId()
    uxth w2, w2                     // zero top 16 bits
    cbnz w2, .Lslow_lock            // thread ids don't match -> contention, go to slow path;
                                    // else the owner matches -> recursive lock, fall through
    add w2, w1, #65536              // increment count in lock word, placing in w2 for storing
    lsr w1, w2, 30                  // if either of the top two bits are set, we overflowed
    cbnz w1, .Lslow_lock            // if we overflow the count go slow path
    str w2, [x0, #LOCK_WORD_OFFSET] // no need for stxr as we hold the lock
    ret
.Lslow_lock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case we block
    mov x1, xSELF                   // pass Thread::Current
    mov x2, sp                      // pass SP
    bl artLockObjectFromCode        // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * x0 holds the possibly null object to unlock.
     *
     * Derived from arm32 code.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz x0, .Lslow_unlock
    ldr w1, [x0, #LOCK_WORD_OFFSET]
    lsr w2, w1, 30
    cbnz w2, .Lslow_unlock          // if either of the top two bits are set, go slow path
    ldr w2, [xSELF, #THREAD_ID_OFFSET]
    eor w3, w1, w2                  // lock_word.ThreadId() ^ self->ThreadId()
    uxth w3, w3                     // zero top 16 bits
    cbnz w3, .Lslow_unlock          // do lock word and self thread id's match?
    cmp w1, #65536
    bpl .Lrecursive_thin_unlock     // count != 0 -> just decrement the count
    // transition to unlocked, w3 holds 0
    dmb ish                         // full (LoadStore|StoreStore) memory barrier
    str w3, [x0, #LOCK_WORD_OFFSET]
    ret
.Lrecursive_thin_unlock:
    sub w1, w1, #65536
    str w1, [x0, #LOCK_WORD_OFFSET]
    ret
.Lslow_unlock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case exception allocation triggers GC
    mov x1, xSELF                   // pass Thread::Current
    mov x2, sp                      // pass SP
    bl artUnlockObjectFromCode      // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register.
    sub sp, sp, #32                 // Stack needs to be 16-byte aligned on calls.
    .cfi_adjust_cfa_offset 32
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp xSELF, xLR, [sp, #16]
    .cfi_rel_offset x18, 16
    .cfi_rel_offset x30, 24

    // Call runtime code.
    bl artIsAssignableFromCode

    // If the result is zero, the types are not assignable: throw.
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return.
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32
    ret

.Lthrow_class_cast_exception:
    // Restore.
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF                   // pass Thread::Current
    mov x3, sp                      // pass SP
    b artThrowClassCastException    // (Class*, Class*, Thread*, SP)
    brk 0                           // We should not return here...
END art_quick_check_cast

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * x0 = array, x1 = index, x2 = value
     *
     * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
     * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
     * using index-zero-extension in load/stores.
     *
     * Temporaries: x3, x4
     * TODO: x4 OK? ip seems wrong here.
     */
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst x0, x0
    bne art_quick_aput_obj_with_bound_check
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr w3, [x0, #ARRAY_LENGTH_OFFSET]
    cmp w3, w1
    bhi art_quick_aput_obj
    mov x0, x1
    mov x1, x3
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check

ENTRY art_quick_aput_obj
    cbz x2, .Ldo_aput_null
    ldr w3, [x0, #CLASS_OFFSET]     // Heap reference = 32b
                                    // This also zero-extends to x3
    ldr w4, [x2, #CLASS_OFFSET]     // Heap reference = 32b
                                    // This also zero-extends to x4
    ldr w3, [x3, #CLASS_COMPONENT_TYPE_OFFSET]  // Heap reference = 32b
                                    // This also zero-extends to x3
    cmp w3, w4                      // value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
                                    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]        // Heap reference = 32b
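    // Card-mark the array for the GC write barrier. The shift by 7 matches ART's
    // 128-byte card size; the byte stored is taken from the card-table base register
    // itself (the table is biased so its low byte equals the dirty-card value) - an
    // assumption inferred from this code and ART's other ports, not spelled out here.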
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Ldo_aput_null:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
                                    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]        // Heap reference = 32b
    ret
.Lcheck_assignability:
    // Store arguments and link register.
    sub sp, sp, #48                 // Stack needs to be 16-byte aligned on calls.
    .cfi_adjust_cfa_offset 48
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, xSELF, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x18, 24
    str xLR, [sp, #32]
    .cfi_rel_offset x30, 32

    // Call runtime code.
    mov x0, x3                      // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    mov x1, x4                      // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    bl artIsAssignableFromCode

    // If the result is zero, the value is not assignable: throw.
    cbz x0, .Lthrow_array_store_exception

    // Restore.
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
                                    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]        // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Lthrow_array_store_exception:
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x1, x2                      // Pass value.
    mov x2, xSELF                   // Pass Thread::Current.
    mov x3, sp                      // Pass SP.
    b artThrowArrayStoreException   // (Object*, Object*, Thread*, SP).
    brk 0                           // Unreached.
END art_quick_aput_obj

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x2, xSELF                   // pass Thread::Current
    mov x3, sp                      // pass SP
    bl \entrypoint                  // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, xSELF                   // pass Thread::Current
    mov x4, sp                      // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macros taking opportunity of code similarities for downcalls with referrer.

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY.
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer.
    mov x2, xSELF                   // pass Thread::Current
    mov x3, sp                      // pass SP
    bl \entrypoint                  // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY.
.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer.
    mov x3, xSELF                   // pass Thread::Current
    mov x4, sp                      // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY.
.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer.
    mov x4, xSELF                   // pass Thread::Current
    mov x5, sp                      // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

    /*
     * Entry from managed code when static storage is uninitialized; this stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned.
     */
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO

TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO

ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

// This is separated out as the argument order is different.
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, x1                      // Stash the value (x1 is clobbered by the referrer load).
    ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer.
    mov x2, x3                      // Put value param in place.
    mov x3, xSELF                   // pass Thread::Current
    mov x4, sp                      // pass SP
    bl artSet64StaticFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_set64_static

    /*
     * Entry from managed code to resolve a string, this stub will allocate a String and deliver an
     * exception on error. On success the String is returned. x0 holds the referring method,
     * w1 holds the string index. The fast path check for hit in strings cache has already been
     * performed.
     */
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS
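// (GENERATE_ALL_ALLOC_ENTRYPOINTS comes from arch/quick_alloc_entrypoints.S, included
// at the top of this file; it presumably stamps out the art_quick_alloc_* stubs once
// per allocator using the downcall macros defined above.)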

    /*
     * Called by managed code when the value in wSUSPEND has been decremented to 0.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
    ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET]  // get xSELF->state_and_flags.as_struct.flags
    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL   // reset wSUSPEND to SUSPEND_CHECK_INTERVAL
    cbnz w0, .Lneed_suspend                 // check flags == 0
    ret                                     // return if flags == 0
.Lneed_suspend:
    mov x0, xSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        // save callee saves for stack crawl
    mov x1, sp
    bl artTestSuspendFromCode               // (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver; the frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]                // place proxy method at bottom of frame
    mov x2, xSELF                   // pass Thread::Current
    mov x3, sp                      // pass SP
    bl artQuickProxyInvokeHandler   // (Method* proxy method, receiver, Thread*, SP)
    ldr xSELF, [sp, #200]           // Restore self pointer.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy   // branch if an exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // Restore frame
    fmov d0, x0                     // Store result in d0 in case it was float or double
    ret                             // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict. x12 is a hidden argument that holds the target method's
     * dex method index.
     */
ENTRY art_quick_imt_conflict_trampoline
    ldr w0, [sp, #0]                // load caller Method*
    ldr w0, [x0, #METHOD_DEX_CACHE_METHODS_OFFSET]  // load dex_cache_resolved_methods
    add x0, x0, #OBJECT_ARRAY_DATA_OFFSET           // get starting address of data
    ldr w0, [x0, x12, lsl 2]        // load the target method
    b art_quick_invoke_interface_trampoline
END art_quick_imt_conflict_trampoline

ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov x9, x0                      // Remember returned code pointer in x9.
    ldr w0, [sp, #0]                // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    br x9
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * |  caller method... |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D15               |    callee save
 * | D14               |    callee save
 * | D13               |    callee save
 * | D12               |    callee save
 * | D11               |    callee save
 * | D10               |    callee save
 * | D9                |    callee save
 * | D8                |    callee save
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | Method*           | <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | handle scope size | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------# <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call.
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]                // Store native ArtMethod* to bottom of stack.

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the handle scope is inserted.
    mov xFP, sp

    mov x8, #5120
    sub sp, sp, x8
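    // Note: the 5120 bytes reserved here act as scratch for the handle scope and the
    // native call stack/register area that artQuickGenericJniTrampoline fills in
    // below; the exact budget appears to be a choice of this trampoline, not an ABI
    // constant.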
1478
1479 // prepare for artQuickGenericJniTrampoline call
1480 // (Thread*, SP)
1481 // x0 x1 <= C calling convention
1482 // xSELF xFP <= where they are
1483
1484 mov x0, xSELF // Thread*
1485 mov x1, xFP
1486 bl artQuickGenericJniTrampoline // (Thread*, sp)
1487
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001488 // Get the updated pointer. This is the bottom of the frame _with_ handle scope.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001489 ldr xFP, [sp]
1490 add x9, sp, #8
1491
1492 cmp x0, #0
1493 b.mi .Lentry_error // Check for error, negative value.
1494
1495 // release part of the alloca.
1496 add x9, x9, x0
1497
1498 // Get the code pointer
1499 ldr xIP0, [x9, #0]
1500
1501 // Load parameters from frame into registers.
1502 // TODO Check with artQuickGenericJniTrampoline.
1503 // Also, check again APPCS64 - the stack arguments are interleaved.
1504 ldp x0, x1, [x9, #8]
1505 ldp x2, x3, [x9, #24]
1506 ldp x4, x5, [x9, #40]
1507 ldp x6, x7, [x9, #56]
1508
1509 ldp d0, d1, [x9, #72]
1510 ldp d2, d3, [x9, #88]
1511 ldp d4, d5, [x9, #104]
1512 ldp d6, d7, [x9, #120]
1513
1514 add sp, x9, #136
1515
1516 blr xIP0 // native call.
1517
1518 // Restore self pointer.
1519 ldr xSELF, [x28, #200]
1520
1521 // result sign extension is handled in C code
1522 // prepare for artQuickGenericJniEndTrampoline call
1523 // (Thread*, SP, result, result_f)
1524 // x0 x1 x2 x3 <= C calling convention
1525 mov x5, x0 // Save return value
1526 mov x0, xSELF // Thread register
1527 mov x1, xFP // Pass the managed frame SP.
1528 mov x2, x5 // Result (from saved)
1529 fmov x3, d0 // d0 will contain floating point result, but needs to go into x3
1530
1531 bl artQuickGenericJniEndTrampoline
1532
1533 // Tear down the alloca.
1534 mov sp, x28
1535 .cfi_def_cfa_register sp
1536
1537 // Restore self pointer.
1538 ldr xSELF, [x28, #200]
1539
1540 // Pending exceptions possible.
1541 ldr x1, [xSELF, THREAD_EXCEPTION_OFFSET]
1542 cbnz x1, .Lexception_in_native
1543
1544 // Tear down the callee-save frame.
1545 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
1546
1547 // Also store the result in d0, in case this is a floating-point return.
1548 fmov d0, x0
1549 ret
1550
1551.Lentry_error:
1552 mov sp, x28
1553 .cfi_def_cfa_register sp
1554 ldr xSELF, [x28, #200]
1555.Lexception_in_native:
1556 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
1557 DELIVER_PENDING_EXCEPTION
1558
1559END art_quick_generic_jni_trampoline
1560
1561/*
1562 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
1563 * of a quick call:
1564 * x0 = method being called/to bridge to.
1565 * x1..x7, d0..d7 = arguments to that method.
1566 */
1567ENTRY art_quick_to_interpreter_bridge
1568 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments.
1569
1570 // x0 will contain mirror::ArtMethod* method.
1571 mov x1, xSELF // Pass Thread::Current().
1572 mov x2, sp
1573
1574 // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
1575 // mirror::ArtMethod** sp)
1576 bl artQuickToInterpreterBridge
1577
1578 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case.
1579
1580 fmov d0, x0
1581
1582 RETURN_OR_DELIVER_PENDING_EXCEPTION
1583END art_quick_to_interpreter_bridge
1584
Andreas Gamped58342c2014-06-05 14:18:08 -07001585
1586//
1587// Instrumentation-related stubs
1588//
1589 .extern artInstrumentationMethodEntryFromCode
1590ENTRY art_quick_instrumentation_entry
1591 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
1592
1593 mov x19, x0 // Preserve method reference in a callee-save.
1594
1595 mov x2, xSELF
1596 mov x3, sp
1597 mov x4, xLR
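 // Note: x1 (the Object* argument) still holds the receiver from the managed calling
 // convention for instance methods; it is passed through untouched.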
1598 bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, SP, LR)
1599
1600 mov x9, x0 // x0 = result of call.
1601 mov x0, x19 // Reload method reference.
1602
1603 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // Note: will restore xSELF
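 // Instead of returning here, fake the return address: the called method will "return"
 // into art_quick_instrumentation_exit, which reports the exit before resuming the caller.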
1604 adr xLR, art_quick_instrumentation_exit
1605 br x9 // Tail-call method with lr set to art_quick_instrumentation_exit.
1606END art_quick_instrumentation_entry
1607
1608 .extern artInstrumentationMethodExitFromCode
1609ENTRY art_quick_instrumentation_exit
1610 mov xLR, #0 // Clobber LR for later checks.
1611
1612 SETUP_REF_ONLY_CALLEE_SAVE_FRAME
1613
1614 // We need to save x0 and d0. We could use a callee-save from SETUP_REF_ONLY, but then
1615 // we would need to fully restore it. As there are a lot of callee-save registers, it seems
1616 // easier to have an extra small stack area.
1617
1618 str x0, [sp, #-16]! // Save integer result.
1619 .cfi_adjust_cfa_offset 16
1620 str d0, [sp, #8] // Save floating-point result.
1621
1622 add x1, sp, #16 // Pass SP.
1623 mov x2, x0 // Pass integer result.
1624 fmov x3, d0 // Pass floating-point result.
1625 mov x0, xSELF // Pass Thread. Do this last: x0 held the integer result until now.
1626 bl artInstrumentationMethodExitFromCode // (Thread*, SP, gpr_res, fpr_res)
1627
1628 mov x9, x0 // Return address from instrumentation call.
1629 mov xLR, x1 // x1 holds the new link register if we are to bounce to deoptimization.
1630
1631 ldr d0, [sp, #8] // Restore floating-point result.
1632 ldr x0, [sp], 16 // Restore integer result, and drop stack area.
1633 .cfi_adjust_cfa_offset -16
1634
1635 POP_REF_ONLY_CALLEE_SAVE_FRAME
1636
1637 br x9 // Tail-call out.
1638END art_quick_instrumentation_exit
1639
1640 /*
1641 * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1642 * will long jump to the upcall with a special exception of -1.
1643 */
1644 .extern artDeoptimize
1645ENTRY art_quick_deoptimize
1646 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1647 mov x0, xSELF // Pass thread.
1648 mov x1, sp // Pass SP.
1649 bl artDeoptimize // artDeoptimize(Thread*, SP)
1650END art_quick_deoptimize
1651
1652
Serban Constantinescu169489b2014-06-11 16:43:35 +01001653 /*
1654 * String's indexOf.
1655 *
1656 * TODO: Not very optimized.
1657 * On entry:
1658 * x0: string object (known non-null)
1659 * w1: char to match (known <= 0xFFFF)
1660 * w2: Starting offset in string data
1661 */
1662ENTRY art_quick_indexof
1663 ldr w3, [x0, #STRING_COUNT_OFFSET]
1664 ldr w4, [x0, #STRING_OFFSET_OFFSET]
1665 ldr w0, [x0, #STRING_VALUE_OFFSET] // w0 := value (char array reference).
1666
1667 /* Clamp start to [0..count] */
1668 cmp w2, #0
1669 csel w2, wzr, w2, lt
1670 cmp w2, w3
1671 csel w2, w3, w2, gt
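 /* The two cmp/csel pairs above implement, in C terms:
  * if (start < 0) start = 0; if (start > count) start = count; */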
1672
1673 /* Build a pointer to the start of the string data */
1674 add x0, x0, #STRING_DATA_OFFSET
1675 add x0, x0, x4, lsl #1
1676
1677 /* Save a copy to compute result */
1678 mov x5, x0
1679
1680 /* Build pointer to start of data to compare and pre-bias */
1681 add x0, x0, x2, lsl #1
1682 sub x0, x0, #2
1683
1684 /* Compute iteration count */
1685 sub w2, w3, w2
1686
1687 /*
1688 * At this point we have:
1689 * x0: start of the data to test
1690 * w1: char to compare
1691 * w2: iteration count
1692 * x5: original start of string data
1693 */
1694
1695 subs w2, w2, #4
1696 b.lt .Lindexof_remainder
1697
1698.Lindexof_loop4:
1699 ldrh w6, [x0, #2]!
1700 ldrh w7, [x0, #2]!
1701 ldrh w8, [x0, #2]!
1702 ldrh w9, [x0, #2]!
1703 cmp w6, w1
1704 b.eq .Lmatch_0
1705 cmp w7, w1
1706 b.eq .Lmatch_1
1707 cmp w8, w1
1708 b.eq .Lmatch_2
1709 cmp w9, w1
1710 b.eq .Lmatch_3
1711 subs w2, w2, #4
1712 b.ge .Lindexof_loop4
1713
1714.Lindexof_remainder:
1715 adds w2, w2, #4
1716 b.eq .Lindexof_nomatch
1717
1718.Lindexof_loop1:
1719 ldrh w6, [x0, #2]!
1720 cmp w6, w1
1721 b.eq .Lmatch_3
1722 subs w2, w2, #1
1723 b.ne .Lindexof_loop1
1724
1725.Lindexof_nomatch:
1726 mov x0, #-1
1727 ret
1728
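// The ldrh instructions above use pre-indexed addressing: x0 advances by 2 before each
// load, so after a group of four loads x0 points at the char from the fourth load.
// A match on the k-th load (k = 0..3) therefore sits at x0 - 2*(3-k); subtracting the
// data start (x5) and shifting right by one converts bytes to a char index. That is
// where the #6/#4/#2 corrections below come from.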
1729.Lmatch_0:
1730 sub x0, x0, #6
1731 sub x0, x0, x5
1732 asr x0, x0, #1
1733 ret
1734.Lmatch_1:
1735 sub x0, x0, #4
1736 sub x0, x0, x5
1737 asr x0, x0, #1
1738 ret
1739.Lmatch_2:
1740 sub x0, x0, #2
1741 sub x0, x0, x5
1742 asr x0, x0, #1
1743 ret
1744.Lmatch_3:
1745 sub x0, x0, x5
1746 asr x0, x0, #1
1747 ret
1748END art_quick_indexof
Andreas Gampe266340d2014-05-02 07:55:24 -07001749
1750 /*
1751 * String's compareTo.
1752 *
1753 * TODO: Not very optimized.
1754 *
1755 * On entry:
1756 * x0: this object pointer
1757 * x1: comp object pointer
1758 *
1759 */
Andreas Gampe4d0589c2014-06-10 16:10:56 -07001760 .extern memcmp16_generic_static
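 // Assumed contract of memcmp16: compare uint16_t units (not bytes) and return a
 // negative/zero/positive difference, like memcmp; the raw value is returned to the
 // caller below whenever it is non-zero.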
Andreas Gampe266340d2014-05-02 07:55:24 -07001761ENTRY art_quick_string_compareto
1762 mov x2, x0 // x0 is return, use x2 for first input.
1763 sub x0, x2, x1 // Same string object?
1764 cbnz x0, 1f
1765 ret
17661: // Different string objects.
1767
1768 ldr w6, [x2, #STRING_OFFSET_OFFSET]
1769 ldr w5, [x1, #STRING_OFFSET_OFFSET]
1770 ldr w4, [x2, #STRING_COUNT_OFFSET]
1771 ldr w3, [x1, #STRING_COUNT_OFFSET]
1772 ldr w2, [x2, #STRING_VALUE_OFFSET]
1773 ldr w1, [x1, #STRING_VALUE_OFFSET]
1774
1775 /*
1776 * Now: CharArray* Offset Count
1777 * first arg x2 w6 w4
1778 * second arg x1 w5 w3
1779 */
1780
1781 // x0 := count1 (w4) - count2 (w3). The 32-bit loads above zero-extended into x3/x4.
1782 subs x0, x4, x3
1783 // Min(count1, count2) into w3.
1784 csel x3, x3, x4, ge
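 // Net effect from here on, as a C sketch (hypothetical, for clarity):
 //   diff = compare_chars(s1, s2, min(count1, count2));
 //   return diff != 0 ? diff : count1 - count2;  // count1 - count2 is kept in x0.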
1785
1786 // Build pointer into string data.
1787
1788 // Add offset in array (substr etc.) (sign extend and << 1).
1789 add x2, x2, w6, sxtw #1
1790 add x1, x1, w5, sxtw #1
1791
1792 // Add offset in CharArray to array.
1793 add x2, x2, #STRING_DATA_OFFSET
1794 add x1, x1, #STRING_DATA_OFFSET
1795
Serban Constantinescu169489b2014-06-11 16:43:35 +01001796 // TODO: Tune this value.
Andreas Gampe266340d2014-05-02 07:55:24 -07001797 // Check for long string, do memcmp16 for them.
1798 cmp w3, #28 // Constant from arm32.
1799 bgt .Ldo_memcmp16
1800
1801 /*
1802 * Now:
1803 * x2: *first string data
1804 * x1: *second string data
1805 * w3: iteration count
1806 * x0: return value if comparison equal
1807 * x4, x5, x6, x7: free
1808 */
1809
1810 // Do a simple unrolled loop.
1811.Lloop:
1812 // At least two more elements?
1813 subs w3, w3, #2
1814 b.lt .Lremainder_or_done
1815
1816 ldrh w4, [x2], #2
1817 ldrh w5, [x1], #2
1818
1819 ldrh w6, [x2], #2
1820 ldrh w7, [x1], #2
1821
1822 subs w4, w4, w5
1823 b.ne .Lw4_result
1824
1825 subs w6, w6, w7
1826 b.ne .Lw6_result
1827
1828 b .Lloop
1829
1830.Lremainder_or_done:
1831 adds w3, w3, #1
1832 b.eq .Lremainder
1833 ret
1834
1835.Lremainder:
1836 ldrh w4, [x2], #2
1837 ldrh w5, [x1], #2
1838 subs w4, w4, w5
1839 b.ne .Lw4_result
1840 ret
1841
1842// Result is in w4
1843.Lw4_result:
1844 sxtw x0, w4
1845 ret
1846
1847// Result is in w6
1848.Lw6_result:
1849 sxtw x0, w6
1850 ret
1851
1852.Ldo_memcmp16:
1853 str x0, [sp, #-16]! // Save x0 (the length difference).
     .cfi_adjust_cfa_offset 16
1854
1855 mov x0, x2
1856 uxtw x2, w3
Andreas Gampe4d0589c2014-06-10 16:10:56 -07001857 bl memcmp16_generic_static
Andreas Gampe266340d2014-05-02 07:55:24 -07001858
1859 ldr x1, [sp], #16 // Restore old x0 = length diff.
     .cfi_adjust_cfa_offset -16
1860
1861 cmp x0, #0 // Check the memcmp difference
1862 csel x0, x0, x1, ne // x0 := x0 != 0 ? x0 : x1
1863 ret
1864END art_quick_string_compareto
Zheng Xu0210d112014-06-17 12:25:48 +08001865
1866// Macro to facilitate adding new entrypoints which call a native function directly.
1867// Currently, xSELF is the only thing we need to take care of between managed code and AAPCS:
    // xSELF lives in x18, which AAPCS64 reserves as a platform register, so a C callee may clobber it.
1868// But we might introduce more differences.
1869.macro NATIVE_DOWNCALL name, entrypoint
1870 .extern \entrypoint
1871ENTRY \name
1872 sub sp, sp, #16
     .cfi_adjust_cfa_offset 16
1873 stp xSELF, xLR, [sp]
     .cfi_rel_offset x30, 8
1874 bl \entrypoint
1875 ldp xSELF, xLR, [sp]
     .cfi_restore x30
1876 add sp, sp, #16
     .cfi_adjust_cfa_offset -16
1877 ret
1878END \name
1879.endm
1880
1881NATIVE_DOWNCALL art_quick_fmod fmod
1882NATIVE_DOWNCALL art_quick_fmodf fmodf
1883NATIVE_DOWNCALL art_quick_memcpy memcpy
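// For reference, "NATIVE_DOWNCALL art_quick_fmod fmod" expands to roughly the following
// (CFI directives omitted):
//   ENTRY art_quick_fmod
//       sub sp, sp, #16
//       stp xSELF, xLR, [sp]
//       bl fmod                 // d0, d1 in; d0 out, per AAPCS64.
//       ldp xSELF, xLR, [sp]
//       add sp, sp, #16
//       ret
//   END art_quick_fmod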