/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]    // x9 = art::Runtime::instance_ (an art::Runtime*).

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll].
    ldr x9, [x9, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #368
    .cfi_adjust_cfa_offset 368

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 368)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP args: d0-d7. (The original stores began at d1 and stored d2 twice,
    // which skipped d0; spill d0-d7 in order instead.)
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // FP callee-saves
    stp d8, d9, [sp, #72]
    stp d10, d11, [sp, #88]
    stp d12, d13, [sp, #104]
    stp d14, d15, [sp, #120]

    stp d16, d17, [sp, #136]
    stp d18, d19, [sp, #152]
    stp d20, d21, [sp, #168]
    stp d22, d23, [sp, #184]
    stp d24, d25, [sp, #200]
    stp d26, d27, [sp, #216]
    stp d28, d29, [sp, #232]
    stp d30, d31, [sp, #248]


    // Callee saved.
    stp xSELF, x19, [sp, #264]
    .cfi_rel_offset x18, 264
    .cfi_rel_offset x19, 272

    stp x20, x21, [sp, #280]
    .cfi_rel_offset x20, 280
    .cfi_rel_offset x21, 288

    stp x22, x23, [sp, #296]
    .cfi_rel_offset x22, 296
    .cfi_rel_offset x23, 304

    stp x24, x25, [sp, #312]
    .cfi_rel_offset x24, 312
    .cfi_rel_offset x25, 320

    stp x26, x27, [sp, #328]
    .cfi_rel_offset x26, 328
    .cfi_rel_offset x27, 336

    stp x28, xFP, [sp, #344]    // Save FP.
    .cfi_rel_offset x28, 344
    .cfi_rel_offset x29, 352

    str xLR, [sp, #360]
    .cfi_rel_offset x30, 360

    // Store the appropriate callee-save method at the bottom of the frame.
    str x9, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].

.endm
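
    /*
     * Frame layout produced above (sketch for reference; offsets are those
     * used by the stores, total FRAME_SIZE_SAVE_ALL_CALLEE_SAVE = 368 bytes):
     *   [sp, #0]    ArtMethod* for kSaveAll
     *   [sp, #8]    d0-d7    FP args
     *   [sp, #72]   d8-d15   FP callee-saves
     *   [sp, #136]  d16-d31
     *   [sp, #264]  x18 (xSELF), x19-x28, x29 (xFP)
     *   [sp, #360]  x30 (xLR)
     */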

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
// WIP.
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]    // x9 = art::Runtime::instance_ (an art::Runtime*).

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly].
    ldr x9, [x9, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 176)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP callee-saves
    stp d8, d9, [sp, #8]
    stp d10, d11, [sp, #24]
    stp d12, d13, [sp, #40]
    stp d14, d15, [sp, #56]

    // Callee saved.
    stp xSELF, x19, [sp, #72]
    .cfi_rel_offset x18, 72
    .cfi_rel_offset x19, 80

    stp x20, x21, [sp, #88]
    .cfi_rel_offset x20, 88
    .cfi_rel_offset x21, 96

    stp x22, x23, [sp, #104]
    .cfi_rel_offset x22, 104
    .cfi_rel_offset x23, 112

    stp x24, x25, [sp, #120]
    .cfi_rel_offset x24, 120
    .cfi_rel_offset x25, 128

    stp x26, x27, [sp, #136]
    .cfi_rel_offset x26, 136
    .cfi_rel_offset x27, 144

    stp x28, xFP, [sp, #152]    // Save FP.
    .cfi_rel_offset x28, 152
    .cfi_rel_offset x29, 160

    str xLR, [sp, #168]
    .cfi_rel_offset x30, 168

    // Store the appropriate callee-save method at the bottom of the frame.
    str x9, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly].
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    brk 0
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    brk 0
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #304
    .cfi_adjust_cfa_offset 304

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 304)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]
    stp d8, d9, [sp, #80]
    stp d10, d11, [sp, #96]
    stp d12, d13, [sp, #112]
    stp d14, d15, [sp, #128]

    stp x1, x2, [sp, #144]
    .cfi_rel_offset x1, 144
    .cfi_rel_offset x2, 152

    stp x3, x4, [sp, #160]
    .cfi_rel_offset x3, 160
    .cfi_rel_offset x4, 168

    stp x5, x6, [sp, #176]
    .cfi_rel_offset x5, 176
    .cfi_rel_offset x6, 184

    stp x7, xSELF, [sp, #192]
    .cfi_rel_offset x7, 192
    .cfi_rel_offset x18, 200

    stp x19, x20, [sp, #208]
    .cfi_rel_offset x19, 208
    .cfi_rel_offset x20, 216

    stp x21, x22, [sp, #224]
    .cfi_rel_offset x21, 224
    .cfi_rel_offset x22, 232

    stp x23, x24, [sp, #240]
    .cfi_rel_offset x23, 240
    .cfi_rel_offset x24, 248

    stp x25, x26, [sp, #256]
    .cfi_rel_offset x25, 256
    .cfi_rel_offset x26, 264

    stp x27, x28, [sp, #272]
    .cfi_rel_offset x27, 272
    .cfi_rel_offset x28, 280

    stp xFP, xLR, [sp, #288]
    .cfi_rel_offset x29, 288
    .cfi_rel_offset x30, 296
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]    // x9 = art::Runtime::instance_ (an art::Runtime*).

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs].
    ldr x9, [x9, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str x9, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0

    ldr d1, [sp, #24]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    brk 0
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    brk 0
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode    // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                                   // Unreached.
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    ldr x9, [xSELF, #THREAD_EXCEPTION_OFFSET]   // Get exception field.
    cbnz x9, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

// FIXME: Temporary fix for TR(XSELF).
.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x0, x19                         // pass Thread::Current
    mov x1, sp                          // pass SP
    b \cxx_name                         // \cxx_name(Thread*, SP)
END \c_name
.endm

// FIXME: Temporary fix for TR(XSELF).
.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context.
    mov x1, x19                         // pass Thread::Current.
    mov x2, sp                          // pass SP.
    b \cxx_name                         // \cxx_name(arg, Thread*, SP).
    brk 0
END \c_name
.endm

// FIXME: Temporary fix for TR(XSELF).
.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x2, x19                         // pass Thread::Current
    mov x3, sp                          // pass SP
    b \cxx_name                         // \cxx_name(arg1, arg2, Thread*, SP)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * TODO arm64 specifics need to be fleshed out.
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading x0 with the target Method*, x0 will contain
     * the method_idx. This wrapper will save x1-x3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in x1.
     *
     * The helper will attempt to locate the target and return a result in x0 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return NULL/NULL. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    brk 0
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=5*8   // x4, x5, SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8

    mov x9, sp                          // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD  // Calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* + args.
    and x10, x10, #~0xf                 // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x9, [x10, #32]                  // Save old stack pointer.
    .cfi_rel_offset sp, 32

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8      // Destination is bottom of stack + 8, past the null ArtMethod* slot.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4      // Copy one 4-byte slot per iteration, from the top of the args down.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into Method* at bottom of frame.
    str xzr, [sp]

.endm

.macro INVOKE_STUB_CALL_AND_RETURN

    // Load the method's quick code entry point.
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldr x2, [x29, #32]      // Restore stack pointer.
    mov sp, x2
    .cfi_restore sp

    ldp x29, x30, [x29]     // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 * extern "C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t *args,      x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char *shorty);       x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |    Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |    Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod* NULL      | <- SP
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
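/*
 * Worked example (sketch): for an instance method "double f(int, long, float)"
 * the shorty is "DIJF" - return type first, one character per argument. The
 * fill loop below loads w1 = "this", then w2 = the int, x3 = the long and
 * s0 = the float, and INVOKE_STUB_CALL_AND_RETURN finally stores d0 through
 * the JValue* because the shorty begins with 'D'.
 */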
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1     // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4    // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction     // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'               // is this a float?
    bne .LisDouble

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15           // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'               // is this a double?
    bne .LisLong

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15           // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'               // is this a long?
    bne .LisOther

    cmp x8, #6*12               // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8            // Calculate subroutine to jump to.
    br x17

.LisOther:                      // Everything else takes one vReg.
    cmp x8, #6*12               // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8            // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables.
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
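
// Each LOADREG expansion is three 4-byte instructions (ldr/add/b), i.e.
// 12 bytes, so x8/x15 double as byte offsets into the tables below, and the
// "all registers full" checks above compare against <available registers> * 12.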

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/* extern "C"
 * void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                   uint32_t *args,      x1
 *                                   uint32_t argsize,    w2
 *                                   Thread *self,        x3
 *                                   JValue *result,      x4
 *                                   char *shorty);       x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1     // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction2    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'               // is this a float?
    bne .LisDouble2

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15           // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'               // is this a double?
    bne .LisLong2

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15           // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'               // is this a long?
    bne .LisOther2

    cmp x8, #7*12               // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8            // Calculate subroutine to jump to.
    br x17

.LisOther2:                     // Everything else takes one vReg.
    cmp x8, #7*12               // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8            // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
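    // x0 is advanced to &gprs_[30]; each ldp below loads a descending pair.
    // Note that gprs_[31] - the target SP - lands in x1 and is installed
    // into sp before the final branch.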
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

UNIMPLEMENTED art_quick_handle_fill_data

UNIMPLEMENTED art_quick_lock_object
UNIMPLEMENTED art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register.
    sub sp, sp, #32             // Stack needs to be 16-byte aligned on calls.
    .cfi_adjust_cfa_offset 32
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp xSELF, xLR, [sp, #16]
    .cfi_rel_offset x18, 16
    .cfi_rel_offset x30, 24

    // Call runtime code.
    bl artIsAssignableFromCode

    // A zero result means the classes are not assignable: throw.
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return.
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32
    ret

.Lthrow_class_cast_exception:
    // Restore.
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    b artThrowClassCastException        // (Class*, Class*, Thread*, SP)
    brk 0                               // We should not return here...
END art_quick_check_cast

UNIMPLEMENTED art_quick_aput_obj_with_null_and_bound_check
UNIMPLEMENTED art_quick_aput_obj_with_bound_check
UNIMPLEMENTED art_quick_aput_obj
UNIMPLEMENTED art_quick_initialize_static_storage
UNIMPLEMENTED art_quick_initialize_type
UNIMPLEMENTED art_quick_initialize_type_and_verify_access
UNIMPLEMENTED art_quick_get32_static
UNIMPLEMENTED art_quick_get64_static
UNIMPLEMENTED art_quick_get_obj_static
UNIMPLEMENTED art_quick_get32_instance
UNIMPLEMENTED art_quick_get64_instance
UNIMPLEMENTED art_quick_get_obj_instance
UNIMPLEMENTED art_quick_set32_static
UNIMPLEMENTED art_quick_set64_static
UNIMPLEMENTED art_quick_set_obj_static
UNIMPLEMENTED art_quick_set32_instance
UNIMPLEMENTED art_quick_set64_instance
UNIMPLEMENTED art_quick_set_obj_instance
UNIMPLEMENTED art_quick_resolve_string

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    brk 0
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    brk 0
END \name
.endm

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

UNIMPLEMENTED art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver. The frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]                // place proxy method at bottom of frame
    mov x2, xSELF                   // pass Thread::Current
    mov x3, sp                      // pass SP
    bl artQuickProxyInvokeHandler   // (Method* proxy method, receiver, Thread*, SP)
    ldr xSELF, [sp, #200]           // Restore self pointer.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy   // branch out if an exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0    // keep d0
    ret                             // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

UNIMPLEMENTED art_quick_imt_conflict_trampoline


ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x19, x0     // Save the called method.
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
    mov x9, x0      // Remember returned code pointer in x9.
    mov x0, x19     // Restore the method, before x19 is restored to its value on entry.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x9, 1f
    br x9
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D15               |    callee save
 * | D14               |    callee save
 * | D13               |    callee save
 * | D12               |    callee save
 * | D11               |    callee save
 * | D10               |    callee save
 * | D9                |    callee save
 * | D8                |    callee save
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | X0/Method*        | <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | SIRT size         | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------#    <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]    // Store native ArtMethod* to bottom of stack.

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the SIRT is inserted.
    mov xFP, sp

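    // Reserve scratch for artQuickGenericJniTrampoline to build the native
    // frame in (SIRT, register area and outgoing args - see the layout
    // sketch above); part of this alloca is released again after the call.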
    mov x8, #5120
    sub sp, sp, x8

    // Prepare for artQuickGenericJniTrampoline call:
    // (Thread*, SP)
    //    x0      x1   <= C calling convention
    //  xSELF    xFP   <= where they are

    mov x0, xSELF   // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline // (Thread*, sp)

    // Get the updated pointer. This is the bottom of the frame _with_ SIRT.
    ldr xFP, [sp]
    add x9, sp, #8

    cmp x0, #0
    b.mi .Lentry_error  // Check for error: negative value.

    // Release part of the alloca.
    add x9, x9, x0

    // Get the code pointer.
    ldr xIP0, [x9, #0]

    // Load parameters from frame into registers.
    // TODO Check with artQuickGenericJniTrampoline.
    // Also, check again AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [x9, #8]
    ldp x2, x3, [x9, #24]
    ldp x4, x5, [x9, #40]
    ldp x6, x7, [x9, #56]

    ldp d0, d1, [x9, #72]
    ldp d2, d3, [x9, #88]
    ldp d4, d5, [x9, #104]
    ldp d6, d7, [x9, #120]

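    // Skip the consumed register area: 8 (code pointer) + 8*8 (GPR args)
    // + 8*8 (FPR args) = 136 bytes; SP now points at the outgoing stack args.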
    add sp, x9, #136

    blr xIP0        // Native call.

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // Result sign extension is handled in C code.
    // Prepare for artQuickGenericJniEndTrampoline call:
    // (Thread*, SP, result, result_f)
    //   x0      x1   x2      x3       <= C calling convention
    mov x5, x0      // Save return value.
    mov x0, xSELF   // Thread register.
    mov x1, xFP     // Stack pointer.
    mov x2, x5      // Result (from saved).
    fmov x3, d0     // d0 will contain floating point result, but needs to go into x3.

    bl artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // Pending exceptions possible.
    ldr x1, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x1, .Lexception_in_native

    // Tear down the callee-save frame.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    // Store the result in d0 as well, for the case of an FP return.
    fmov d0, x0
    ret

.Lentry_error:
    mov sp, x28
    .cfi_def_cfa_register sp
    ldr xSELF, [x28, #200]
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline

/*
 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
 * of a quick call:
 * x0 = method being called/to bridge to.
 * x1..x7, d0..d7 = arguments to that method.
 */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME    // Set up frame and save arguments.

    // x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF   // pass Thread::Current
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.

    fmov d0, x0

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

UNIMPLEMENTED art_quick_instrumentation_entry
UNIMPLEMENTED art_quick_instrumentation_exit
UNIMPLEMENTED art_quick_deoptimize
UNIMPLEMENTED art_quick_mul_long
UNIMPLEMENTED art_quick_shl_long
UNIMPLEMENTED art_quick_shr_long
UNIMPLEMENTED art_quick_ushr_long
UNIMPLEMENTED art_quick_indexof
UNIMPLEMENTED art_quick_string_compareto