Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2012 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "asm_support_x86_64.S" |
| 18 | |
// For x86-64, the CFA is rsp+8, the address above the pushed return address on the stack.
| 20 | |
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     * Spills the callee-save GPRs, reserves the ArtMethod* slot, and stores the
     * save-all callee-save frame method at the bottom of the new frame.
     */
MACRO0(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME)
    // R10 := Runtime::Current()
    movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10
    movq (%r10), %r10
    // Save callee-save registers in the order expected by the core spills bitmap.
    PUSH r15  // Callee save.
    PUSH r14  // Callee save.
    PUSH r13  // Callee save.
    PUSH r12  // Callee save.
    PUSH rbp  // Callee save.
    PUSH rbx  // Callee save.
    subq LITERAL(8), %rsp  // Space for Method* (also aligns the frame to 16 bytes).
    CFI_ADJUST_CFA_OFFSET(8)
    // R10 := ArtMethod* for the save-all callee save frame method.
    movq RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
    // Store ArtMethod* to bottom of stack.
    movq %r10, 0(%rsp)
END_MACRO
| 43 | |
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     * Not implemented yet on x86-64: traps if reached.
     */
MACRO0(SETUP_REF_ONLY_CALLEE_SAVE_FRAME)
    int3  // Unimplemented placeholder: trap.
    int3
END_MACRO
| 52 | |
    /*
     * Macro that tears down the kRefsOnly callee save frame.
     * Not implemented yet on x86-64: traps if reached.
     */
MACRO0(RESTORE_REF_ONLY_CALLEE_SAVE_FRAME)
    int3  // Unimplemented placeholder: trap.
    int3
END_MACRO
| 57 | |
    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     * Spills callee saves and the quick-ABI GPR args, reserves space for the
     * FPR args plus the ArtMethod* slot, then spills the XMM args.
     */
MACRO0(SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME)
    // R10 := Runtime::Current()
    movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10
    movq (%r10), %r10
    // Save callee and GPR args, mixed together to agree with core spills bitmap.
    PUSH r15  // Callee save.
    PUSH r14  // Callee save.
    PUSH r13  // Callee save.
    PUSH r12  // Callee save.
    PUSH r9   // Quick arg 5.
    PUSH r8   // Quick arg 4.
    PUSH rsi  // Quick arg 1.
    PUSH rbp  // Callee save.
    PUSH rbx  // Callee save.
    PUSH rdx  // Quick arg 2.
    PUSH rcx  // Quick arg 3.
    // Create space for FPR args and create 2 slots, 1 of padding and 1 for the ArtMethod*.
    // Layout: [rsp+0] ArtMethod*, [rsp+8] padding, [rsp+16..79] xmm0-xmm7.
    subq LITERAL(80), %rsp
    CFI_ADJUST_CFA_OFFSET(80)
    // R10 := ArtMethod* for ref and args callee save frame method.
    movq RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
    // Save FPRs.
    movq %xmm0, 16(%rsp)
    movq %xmm1, 24(%rsp)
    movq %xmm2, 32(%rsp)
    movq %xmm3, 40(%rsp)
    movq %xmm4, 48(%rsp)
    movq %xmm5, 56(%rsp)
    movq %xmm6, 64(%rsp)
    movq %xmm7, 72(%rsp)
    // Store ArtMethod* to bottom of stack.
    movq %r10, 0(%rsp)
END_MACRO
| 95 | |
    /*
     * Macro that tears down the kRefsAndArgs callee save frame: reloads the FPR
     * args, drops the FPR/ArtMethod* area, then pops GPR args and callee saves in
     * the exact reverse order of SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME.
     */
MACRO0(RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME)
    // Restore FPRs.
    movq 16(%rsp), %xmm0
    movq 24(%rsp), %xmm1
    movq 32(%rsp), %xmm2
    movq 40(%rsp), %xmm3
    movq 48(%rsp), %xmm4
    movq 56(%rsp), %xmm5
    movq 64(%rsp), %xmm6
    movq 72(%rsp), %xmm7
    addq LITERAL(80), %rsp  // Drop FPR area, padding and ArtMethod* slot.
    CFI_ADJUST_CFA_OFFSET(-80)
    // Restore callee and GPR args, mixed together to agree with core spills bitmap.
    POP rcx
    POP rdx
    POP rbx
    POP rbp
    POP rsi
    POP r8
    POP r9
    POP r12
    POP r13
    POP r14
    POP r15
END_MACRO
| 121 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 122 | |
    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the
     * pending exception is Thread::Current()->exception_. Does not return.
     */
MACRO0(DELIVER_PENDING_EXCEPTION)
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME         // save callee saves for throw
    // (Thread*, SP) setup
    movq %gs:THREAD_SELF_OFFSET, %rdi        // arg0 := Thread::Current()
    movq %rsp, %rsi                          // arg1 := SP
    call PLT_SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*, SP)
    int3                                     // unreached: the runtime delivers the exception
END_MACRO
| 135 | |
    /*
     * Macro defining a no-argument throw entrypoint named c_name that should call
     * cxx_name. Not implemented yet on x86-64: traps if reached.
     */
MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 142 | |
    /*
     * Macro defining a one-argument throw entrypoint named c_name that should call
     * cxx_name. Not implemented yet on x86-64: traps if reached.
     */
MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 149 | |
    /*
     * Macro defining a two-argument throw entrypoint named c_name that should call
     * cxx_name. Not implemented yet on x86-64: traps if reached.
     */
MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 156 | |
    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException.
     * Arg1 holds the index, arg2 holds the limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
| 188 | |
    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/r0 with the target Method*, arg0/r0 will contain
     * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/r1.
     *
     * The helper will attempt to locate the target and return a 64-bit result in r0/r1 consisting
     * of the target Method* in r0 and method->code_ in r1.
     *
     * If unsuccessful, the helper will return NULL/NULL. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Not implemented yet on x86-64: traps if reached.
     */
MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 211 | |
// Instantiate the invocation trampolines (all currently trap; see INVOKE_TRAMPOLINE).
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
| 219 | |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 220 | |
    /*
     * Helper for quick invocation stub to set up XMM registers. Assumes r10 == shorty,
     * r11 == arg_array. Clobbers r10, r11 and al. Branches to the "finished" label if it
     * encounters the end of the shorty. Loads the next float/double arg into xmm_reg,
     * skipping 4-byte slots for int args and 8 bytes for longs.
     */
MACRO2(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, finished)
1: // LOOP
    movb (%r10), %al              // al := *shorty
    addq LITERAL(1), %r10         // shorty++
    cmpb LITERAL(0), %al          // if (al == '\0') goto finished
    je VAR(finished, 1)
    cmpb LITERAL(68), %al         // if (al == 'D') goto FOUND_DOUBLE  (68 == 'D')
    je 2f
    cmpb LITERAL(70), %al         // if (al == 'F') goto FOUND_FLOAT   (70 == 'F')
    je 3f
    addq LITERAL(4), %r11         // arg_array++
    // Handle extra space in arg array taken by a long.
    cmpb LITERAL(74), %al         // if (al != 'J') goto LOOP          (74 == 'J')
    jne 1b
    addq LITERAL(4), %r11         // arg_array++ (a long occupies two 4-byte slots)
    jmp 1b                        // goto LOOP
2: // FOUND_DOUBLE
    movsd (%r11), REG_VAR(xmm_reg, 0)
    addq LITERAL(8), %r11         // arg_array += 2
    jmp 4f
3: // FOUND_FLOAT
    movss (%r11), REG_VAR(xmm_reg, 0)
    addq LITERAL(4), %r11         // arg_array++
4:
END_MACRO
| 251 | |
    /*
     * Helper for quick invocation stub to set up GPR registers. Assumes r10 == shorty,
     * r11 == arg_array. Clobbers r10, r11 and al. Branches to the "finished" label if it
     * encounters the end of the shorty. Loads the next int/long arg into gpr_reg32 /
     * gpr_reg64 respectively, skipping float (4-byte) and double (8-byte) slots.
     */
MACRO3(LOOP_OVER_SHORTY_LOADING_GPRS, gpr_reg64, gpr_reg32, finished)
1: // LOOP
    movb (%r10), %al              // al := *shorty
    addq LITERAL(1), %r10         // shorty++
    cmpb LITERAL(0), %al          // if (al == '\0') goto finished
    je VAR(finished, 2)
    cmpb LITERAL(74), %al         // if (al == 'J') goto FOUND_LONG  (74 == 'J')
    je 2f
    cmpb LITERAL(70), %al         // if (al == 'F') goto SKIP_FLOAT  (70 == 'F')
    je 3f
    cmpb LITERAL(68), %al         // if (al == 'D') goto SKIP_DOUBLE (68 == 'D')
    je 4f
    movl (%r11), REG_VAR(gpr_reg32, 1)  // 32-bit load zero-extends into gpr_reg64.
    addq LITERAL(4), %r11         // arg_array++
    jmp 5f
2: // FOUND_LONG
    movq (%r11), REG_VAR(gpr_reg64, 0)
    addq LITERAL(8), %r11         // arg_array += 2
    jmp 5f
3: // SKIP_FLOAT
    addq LITERAL(4), %r11         // arg_array++
    jmp 1b
4: // SKIP_DOUBLE
    addq LITERAL(8), %r11         // arg_array += 2
    jmp 1b
5:
END_MACRO
| 284 | |
    /*
     * Quick invocation stub (instance methods).
     * On entry:
     *   [sp] = return address
     *   rdi = method pointer
     *   rsi = argument array that must at least contain the this pointer.
     *   rdx = size of argument array in bytes
     *   rcx = (managed) thread pointer
     *   r8 = JValue* result
     *   r9 = char* shorty
     * Sets up XMM and GPR argument registers from the shorty and arg array, copies the
     * arg array onto the stack for any overflow arguments, calls the method's quick
     * code, and stores the return value through r8 according to the shorty's return char.
     */
DEFINE_FUNCTION art_quick_invoke_stub
    // Set up argument XMM registers.
    leaq 1(%r9), %r10             // R10 := shorty + 1 ; ie skip return arg character.
    leaq 4(%rsi), %r11            // R11 := arg_array + 4 ; ie skip this pointer.
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm4, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm5, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm6, .Lxmm_setup_finished
    LOOP_OVER_SHORTY_LOADING_XMMS xmm7, .Lxmm_setup_finished
    .balign 16
.Lxmm_setup_finished:
    PUSH rbp                      // Save rbp.
    PUSH r8                       // Save r8/result*.
    PUSH r9                       // Save r9/shorty*.
    mov %rsp, %rbp                // Copy value of stack pointer into base pointer.
    CFI_DEF_CFA_REGISTER(rbp)
    movl %edx, %r10d              // Keep original arg-array size for the copy below.
    addl LITERAL(64), %edx        // Reserve space for return addr, method*, rbp, r8 and r9 in frame.
    andl LITERAL(0xFFFFFFF0), %edx  // Align frame size to 16 bytes.
    subl LITERAL(32), %edx        // Remove space for return address, rbp, r8 and r9.
    subq %rdx, %rsp               // Reserve stack space for argument array.
    movq LITERAL(0), (%rsp)       // Store NULL for method*
    movl %r10d, %ecx              // Place size of args in rcx.
    movq %rdi, %rax               // RAX := method to be called
    movq %rsi, %r11               // R11 := arg_array
    leaq 8(%rsp), %rdi            // Rdi is pointing just above the method* in the stack arguments.
    // Copy arg array into stack.
    rep movsb                     // while (rcx--) { *rdi++ = *rsi++ }
    leaq 1(%r9), %r10             // R10 := shorty + 1 ; ie skip return arg character
    movq %rax, %rdi               // RDI := method to be called
    movl (%r11), %esi             // RSI := this pointer (32-bit reference load)
    addq LITERAL(4), %r11         // arg_array++
    LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, .Lgpr_setup_finished
    LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished
    LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished
    LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished
.Lgpr_setup_finished:
    call *METHOD_QUICK_CODE_OFFSET(%rdi)  // Call the method.
    movq %rbp, %rsp               // Restore stack pointer.
    CFI_DEF_CFA_REGISTER(rsp)
    POP r9                        // Pop r9 - shorty*.
    POP r8                        // Pop r8 - result*.
    POP rbp                       // Pop rbp
    cmpb LITERAL(68), (%r9)       // Test if result type char == 'D'.
    je .Lreturn_double_quick
    cmpb LITERAL(70), (%r9)       // Test if result type char == 'F'.
    je .Lreturn_float_quick
    movq %rax, (%r8)              // Store the result assuming its a long, int or Object*
    ret
.Lreturn_double_quick:
    movsd %xmm0, (%r8)            // Store the double floating point result.
    ret
.Lreturn_float_quick:
    movss %xmm0, (%r8)            // Store the floating point result.
    ret
END_FUNCTION art_quick_invoke_stub
| 355 | |
| 356 | /* |
| 357 | * Quick invocation stub. |
| 358 | * On entry: |
| 359 | * [sp] = return address |
| 360 | * rdi = method pointer |
| 361 | * rsi = argument array or NULL if no arguments. |
| 362 | * rdx = size of argument array in bytes |
| 363 | * rcx = (managed) thread pointer |
| 364 | * r8 = JValue* result |
| 365 | * r9 = char* shorty |
| 366 | */ |
| 367 | DEFINE_FUNCTION art_quick_invoke_static_stub |
| 368 | // Set up argument XMM registers. |
| 369 | leaq 1(%r9), %r10 // R10 := shorty + 1 ; ie skip return arg character |
| 370 | movq %rsi, %r11 // R11 := arg_array |
| 371 | LOOP_OVER_SHORTY_LOADING_XMMS xmm0, .Lxmm_setup_finished2 |
| 372 | LOOP_OVER_SHORTY_LOADING_XMMS xmm1, .Lxmm_setup_finished2 |
| 373 | LOOP_OVER_SHORTY_LOADING_XMMS xmm2, .Lxmm_setup_finished2 |
| 374 | LOOP_OVER_SHORTY_LOADING_XMMS xmm3, .Lxmm_setup_finished2 |
| 375 | LOOP_OVER_SHORTY_LOADING_XMMS xmm4, .Lxmm_setup_finished2 |
| 376 | LOOP_OVER_SHORTY_LOADING_XMMS xmm5, .Lxmm_setup_finished2 |
| 377 | LOOP_OVER_SHORTY_LOADING_XMMS xmm6, .Lxmm_setup_finished2 |
| 378 | LOOP_OVER_SHORTY_LOADING_XMMS xmm7, .Lxmm_setup_finished2 |
| 379 | .balign 16 |
| 380 | .Lxmm_setup_finished2: |
| 381 | PUSH rbp // Save rbp. |
| 382 | PUSH r8 // Save r8/result*. |
| 383 | PUSH r9 // Save r9/shorty*. |
| 384 | mov %rsp, %rbp // Copy value of stack pointer into base pointer. |
| 385 | CFI_DEF_CFA_REGISTER(rbp) |
| 386 | movl %edx, %r10d |
| 387 | addl LITERAL(64), %edx // Reserve space for return addr, method*, rbp, r8 and r9 in frame. |
| 388 | andl LITERAL(0xFFFFFFF0), %edx // Align frame size to 16 bytes. |
| 389 | subl LITERAL(32), %edx // Remove space for return address, rbp, r8 and r9. |
| 390 | subq %rdx, %rsp // Reserve stack space for argument array. |
| 391 | movq LITERAL(0), (%rsp) // Store NULL for method* |
| 392 | movl %r10d, %ecx // Place size of args in rcx. |
| 393 | movq %rdi, %rax // RAX := method to be called |
| 394 | movq %rsi, %r11 // R11 := arg_array |
| 395 | leaq 8(%rsp), %rdi // Rdi is pointing just above the method* in the stack arguments. |
| 396 | // Copy arg array into stack. |
| 397 | rep movsb // while (rcx--) { *rdi++ = *rsi++ } |
| 398 | leaq 1(%r9), %r10 // R10 := shorty + 1 ; ie skip return arg character |
| 399 | movq %rax, %rdi // RDI := method to be called |
| 400 | LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, .Lgpr_setup_finished2 |
| 401 | LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, .Lgpr_setup_finished2 |
| 402 | LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished2 |
| 403 | LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished2 |
| 404 | LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished2 |
| 405 | .Lgpr_setup_finished2: |
| 406 | call *METHOD_QUICK_CODE_OFFSET(%rdi) // Call the method. |
| 407 | movq %rbp, %rsp // Restore stack pointer. |
| 408 | CFI_DEF_CFA_REGISTER(rsp) |
| 409 | POP r9 // Pop r9 - shorty*. |
| 410 | POP r8 // Pop r8 - result*. |
| 411 | POP rbp // Pop rbp |
| 412 | cmpb LITERAL(68), (%r9) // Test if result type char == 'D'. |
| 413 | je .Lreturn_double_quick2 |
| 414 | cmpb LITERAL(70), (%r9) // Test if result type char == 'F'. |
| 415 | je .Lreturn_float_quick2 |
| 416 | movq %rax, (%r8) // Store the result assuming its a long, int or Object* |
| 417 | ret |
| 418 | .Lreturn_double_quick2: |
| 419 | movsd %xmm0, (%r8) // Store the double floating point result. |
| 420 | ret |
| 421 | .Lreturn_float_quick2: |
| 422 | movss %xmm0, (%r8) // Store the floating point result. |
| 423 | ret |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 424 | END_FUNCTION art_quick_invoke_stub |
| 425 | |
    /*
     * Macro defining a no-argument runtime downcall named c_name that should call
     * cxx_name and finish with return_macro. Not implemented yet on x86-64: traps.
     */
MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 432 | |
    /*
     * Macro defining a one-argument runtime downcall named c_name that should call
     * cxx_name and finish with return_macro. Not implemented yet on x86-64: traps.
     */
MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 439 | |
    /*
     * Macro defining a two-argument runtime downcall named c_name that should call
     * cxx_name and finish with return_macro. Not implemented yet on x86-64: traps.
     */
MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 446 | |
    /*
     * Macro defining a three-argument runtime downcall named c_name that should call
     * cxx_name and finish with return_macro. Not implemented yet on x86-64: traps.
     */
MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    DEFINE_FUNCTION VAR(c_name, 0)
    int3  // Unimplemented placeholder: trap.
    int3
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 453 | |
    /*
     * Return-sequence macro: return if eax (the call result) is non-zero, otherwise
     * deliver the pending exception. NOTE: the leading int3 traps before the check
     * runs — presumably a deliberate guard while this path is unverified; confirm
     * before removing.
     */
MACRO0(RETURN_IF_RESULT_IS_NON_ZERO)
    int3  // Trap: sequence below not yet enabled.
    testl %eax, %eax               // eax == 0 ?
    jz  1f                         // if eax == 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
| 462 | |
    /*
     * Return-sequence macro: return if eax (the call result) is zero, otherwise
     * deliver the pending exception. NOTE: the leading int3 traps before the check
     * runs — presumably a deliberate guard while this path is unverified; confirm
     * before removing.
     */
MACRO0(RETURN_IF_EAX_ZERO)
    int3  // Trap: sequence below not yet enabled.
    testl %eax, %eax               // eax == 0 ?
    jnz 1f                         // if eax != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
| 471 | |
    /*
     * Return-sequence macro: return if the current thread has no pending exception,
     * otherwise deliver it. Clobbers rcx.
     */
MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    movq %gs:THREAD_EXCEPTION_OFFSET, %rcx  // get exception field
    testq %rcx, %rcx               // rcx == 0 ?
    jnz 1f                         // if rcx != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
| 480 | |
// Generate the allocation entrypoints for each allocator.
// TODO: use arch/quick_alloc_entrypoints.S. Currently we don't as we need to use concatenation
// macros to work around differences between OS/X's as and binutils as (OS/X lacks named arguments
// to macros and the VAR macro won't concatenate arguments properly), this also breaks having
// multi-line macros that use each other (hence using 1 macro per newline below).
// Each #define below expands to one DOWNCALL stub named art_quick_*<c_suffix> that
// should call art*FromCode<cxx_suffix>; all DOWNCALL bodies currently trap.
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(c_suffix, cxx_suffix) \
    TWO_ARG_DOWNCALL art_quick_alloc_object ## c_suffix, artAllocObjectFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(c_suffix, cxx_suffix) \
    TWO_ARG_DOWNCALL art_quick_alloc_object_resolved ## c_suffix, artAllocObjectFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(c_suffix, cxx_suffix) \
    TWO_ARG_DOWNCALL art_quick_alloc_object_initialized ## c_suffix, artAllocObjectFromCodeInitialized ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
    TWO_ARG_DOWNCALL art_quick_alloc_object_with_access_check ## c_suffix, artAllocObjectFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(c_suffix, cxx_suffix) \
    THREE_ARG_DOWNCALL art_quick_alloc_array ## c_suffix, artAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(c_suffix, cxx_suffix) \
    THREE_ARG_DOWNCALL art_quick_alloc_array_resolved ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
    THREE_ARG_DOWNCALL art_quick_alloc_array_with_access_check ## c_suffix, artAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(c_suffix, cxx_suffix) \
    THREE_ARG_DOWNCALL art_quick_check_and_alloc_array ## c_suffix, artCheckAndAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
    THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check ## c_suffix, artCheckAndAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
| 504 | |
// Instantiate the nine allocation entrypoints for each allocator, in plain and
// instrumented variants: DlMalloc, RosAlloc, BumpPointer and TLAB.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)

GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
| 584 | |
// Resolution / initialization downcalls (all currently trap; see *_DOWNCALL macros).
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO

TWO_ARG_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
| 591 | |
// Object monitor-enter entrypoint. Not yet implemented for x86-64: int3 raises a
// breakpoint trap (#BP) so any call into this stub fails loudly instead of silently.
DEFINE_FUNCTION art_quick_lock_object
    int3
    int3
END_FUNCTION art_quick_lock_object
| 596 | |
// Object monitor-exit entrypoint. Not yet implemented for x86-64: int3 traps if reached.
DEFINE_FUNCTION art_quick_unlock_object
    int3
    int3
END_FUNCTION art_quick_unlock_object
| 601 | |
// Type assignability check entrypoint. Not yet implemented for x86-64: int3 traps if reached.
DEFINE_FUNCTION art_quick_is_assignable
    int3
    int3
END_FUNCTION art_quick_is_assignable
| 606 | |
// Checked-cast entrypoint. Not yet implemented for x86-64: int3 traps if reached.
DEFINE_FUNCTION art_quick_check_cast
    int3
    int3
END_FUNCTION art_quick_check_cast
| 611 | |
| 612 | /* |
| 613 | * Entry from managed code for array put operations of objects where the value being stored |
| 614 | * needs to be checked for compatibility. |
 * edi = array, esi = index, edx = value (x86-64 quick ABI; the previous "eax, ecx, edx"
 * were the 32-bit x86 registers carried over from the x86 version of this file)
| 616 | */ |
// Object-array stores (with optional null/bounds checks) and memcpy; not yet ported to x86-64.
UNIMPLEMENTED art_quick_aput_obj_with_null_and_bound_check
UNIMPLEMENTED art_quick_aput_obj_with_bound_check
UNIMPLEMENTED art_quick_aput_obj
UNIMPLEMENTED art_quick_memcpy
| 621 | |
| 622 | NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret |
| 623 | |
// Floating-point remainder and long<->float/double conversion entrypoints; not yet
// ported to x86-64 (the UNIMPLEMENTED macro generates trapping stubs).
UNIMPLEMENTED art_quick_fmod
UNIMPLEMENTED art_quick_fmodf
UNIMPLEMENTED art_quick_l2d
UNIMPLEMENTED art_quick_l2f
UNIMPLEMENTED art_quick_d2l
UNIMPLEMENTED art_quick_f2l
// Integer/long division, modulus and shift entrypoints; not yet ported.
UNIMPLEMENTED art_quick_idivmod
UNIMPLEMENTED art_quick_ldiv
UNIMPLEMENTED art_quick_lmod
UNIMPLEMENTED art_quick_lmul
UNIMPLEMENTED art_quick_lshl
UNIMPLEMENTED art_quick_lshr
UNIMPLEMENTED art_quick_lushr
// Instance field set/get entrypoints (32-bit, 64-bit and object); not yet ported.
UNIMPLEMENTED art_quick_set32_instance
UNIMPLEMENTED art_quick_set64_instance
UNIMPLEMENTED art_quick_set_obj_instance
UNIMPLEMENTED art_quick_get32_instance
UNIMPLEMENTED art_quick_get64_instance
UNIMPLEMENTED art_quick_get_obj_instance
// Static field set/get entrypoints; not yet ported.
UNIMPLEMENTED art_quick_set32_static
UNIMPLEMENTED art_quick_set64_static
UNIMPLEMENTED art_quick_set_obj_static
UNIMPLEMENTED art_quick_get32_static
UNIMPLEMENTED art_quick_get64_static
UNIMPLEMENTED art_quick_get_obj_static
// Proxy method invocation handler; not yet ported.
UNIMPLEMENTED art_quick_proxy_invoke_handler
| 650 | |
    /*
     * Called to resolve an IMT (interface method table) conflict. Not yet ported to x86-64.
     */
UNIMPLEMENTED art_quick_imt_conflict_trampoline
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 655 | |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame^] | 656 | DEFINE_FUNCTION art_quick_resolution_trampoline |
| 657 | SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME |
| 658 | movq %gs:THREAD_SELF_OFFSET, %rdx |
| 659 | movq %rsp, %rcx |
| 660 | call PLT_SYMBOL(artQuickResolutionTrampoline) // (called, receiver, Thread*, SP) |
| 661 | movq %rax, %r10 // Remember returned code pointer in R10. |
| 662 | RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME |
| 663 | testq %r10, %r10 // If code pointer is NULL goto deliver pending exception. |
| 664 | jz 1f |
| 665 | jmp *%r10 // Tail call into method. |
| 666 | 1: |
| 667 | RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME |
| 668 | DELIVER_PENDING_EXCEPTION |
| 669 | END_FUNCTION art_quick_resolution_trampoline |
| 670 | |
| 671 | /* Generic JNI frame layout: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 672 | * |
| 673 | * #-------------------# |
| 674 | * | | |
| 675 | * | caller method... | |
| 676 | * #-------------------# <--- SP on entry |
| 677 | * | Return | |
| 678 | * | R15 | callee save |
| 679 | * | R14 | callee save |
| 680 | * | R13 | callee save |
| 681 | * | R12 | callee save |
| 682 | * | R9 | arg5 |
| 683 | * | R8 | arg4 |
| 684 | * | RSI/R6 | arg1 |
| 685 | * | RBP/R5 | callee save |
| 686 | * | RBX/R3 | callee save |
| 687 | * | RDX/R2 | arg2 |
| 688 | * | RCX/R1 | arg3 |
| 689 | * | XMM7 | float arg 8 |
| 690 | * | XMM6 | float arg 7 |
| 691 | * | XMM5 | float arg 6 |
| 692 | * | XMM4 | float arg 5 |
| 693 | * | XMM3 | float arg 4 |
| 694 | * | XMM2 | float arg 3 |
| 695 | * | XMM1 | float arg 2 |
| 696 | * | XMM0 | float arg 1 |
| 697 | * | Padding | |
| 698 | * | RDI/Method* | <- sp |
| 699 | * #-------------------# |
| 700 | * | local ref cookie | // 4B |
| 701 | * | padding | // 4B |
| 702 | * #----------#--------# |
| 703 | * | | | | |
| 704 | * | Temp/ | SIRT | | Scratch frame is 4k |
| 705 | * | Scratch | v | |
| 706 | * | Frame #--------| |
| 707 | * | | |
| 708 | * | #--------| |
| 709 | * | | ^ | |
| 710 | * | | JNI | | |
| 711 | * | | Stack| | |
| 712 | * #----------#--------# <--- SP on native call (needs alignment?) |
| 713 | * | | |
| 714 | * | Stack for Regs | The trampoline assembly will pop these values |
| 715 | * | | into registers for native call |
| 716 | * #---------#---------# |
| 717 | * | | sp* | |
| 718 | * | Tramp. #---------# |
| 719 | * | args | thread | |
| 720 | * | Tramp. #---------# |
| 721 | * | | method | |
| 722 | * #-------------------# <--- SP on artQuickGenericJniTrampoline |
| 723 | */ |
Andreas Gampe | 2da8823 | 2014-02-27 12:26:20 -0800 | [diff] [blame] | 724 | /* |
| 725 | * Called to do a generic JNI down-call |
| 726 | */ |
DEFINE_FUNCTION art_quick_generic_jni_trampoline
    // On entry (quick ABI): RDI = native ArtMethod* (stored to the frame bottom below);
    // remaining GPR/FPR argument registers are spilled into the frame built here.
    // Save callee and GPR args, mixed together to agree with core spills bitmap.
    PUSH r15  // Callee save.
    PUSH r14  // Callee save.
    PUSH r13  // Callee save.
    PUSH r12  // Callee save.
    PUSH r9   // Quick arg 5.
    PUSH r8   // Quick arg 4.
    PUSH rsi  // Quick arg 1.
    PUSH rbp  // Callee save.
    PUSH rbx  // Callee save.
    PUSH rdx  // Quick arg 2.
    PUSH rcx  // Quick arg 3.
    // Create space for FPR args and create 2 slots, 1 of padding and 1 for the ArtMethod*.
    subq LITERAL(80), %rsp          // 8 FPRs * 8B + 16B (padding + ArtMethod*) = 80.
    CFI_ADJUST_CFA_OFFSET(80)
    // Save FPRs at 16..72; slot 0 is the ArtMethod*, slot 8 is padding.
    movq %xmm0, 16(%rsp)
    movq %xmm1, 24(%rsp)
    movq %xmm2, 32(%rsp)
    movq %xmm3, 40(%rsp)
    movq %xmm4, 48(%rsp)
    movq %xmm5, 56(%rsp)
    movq %xmm6, 64(%rsp)
    movq %xmm7, 72(%rsp)
    // Store native ArtMethod* to bottom of stack.
    movq %rdi, 0(%rsp)
    movq %rsp, %rbp                 // Save SP at callee-save frame; RBP anchors it from here on.
    CFI_DEF_CFA_REGISTER(rbp)
    //
    // Reserve a lot of scratch space below the callee-save frame:
    //
    //      4    local state ref
    //      4    padding
    //   4196    4k scratch space, enough for 2x 256 8-byte parameters (TODO: SIRT overhead?)
    //     16    SIRT member fields ?
    // +  112    14x 8-byte stack-2-register space
    // ------
    //   4332
    // 16-byte aligned: 4336
    //
    // Note: 14x8 = 7*16, so the stack stays aligned for the native call...
    //       Also means: the padding is somewhere in the middle.
    subq LITERAL(4336), %rsp        // CFA is tracked via RBP, so no CFI adjustment here.
    // Prepare for artQuickGenericJniTrampoline call:
    // (Thread*, SP)
    //    rdi    rsi      <= C calling convention
    //  gs:...   rbp      <= where they are
    movq %gs:THREAD_SELF_OFFSET, %rdi
    movq %rbp, %rsi
    call PLT_SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)
    test %rax, %rax                 // NULL code pointer also indicates a pending exception.
    jz 1f
    // Pop the native call's integer arguments from the register-passing area that
    // artQuickGenericJniTrampoline populated (C argument-register order).
    popq %rdi
    popq %rsi
    popq %rdx
    popq %rcx
    popq %r8
    popq %r9
    // TODO: skip floating point if unused, some flag.
    movq 0(%rsp), %xmm0
    movq 8(%rsp), %xmm1
    movq 16(%rsp), %xmm2
    movq 24(%rsp), %xmm3
    movq 32(%rsp), %xmm4
    movq 40(%rsp), %xmm5
    movq 48(%rsp), %xmm6
    movq 56(%rsp), %xmm7
    addq LITERAL(64), %rsp          // Floating-point argument slots consumed.
    // Native call.
    call *%rax                      // Q: is the stack aligned 16B with or without the return addr?
    // Result sign extension is handled in C code.
    // Prepare for artQuickGenericJniEndTrampoline call:
    // (Thread*, SP, result, result_f)
    //   rdi    rsi  rdx     rcx       <= C calling convention
    //  gs:...  rbp  rax     xmm0      <= where they are
    movq %gs:THREAD_SELF_OFFSET, %rdi
    movq %rbp, %rsi
    movq %rax, %rdx
    movq %xmm0, %rcx
    call PLT_SYMBOL(artQuickGenericJniEndTrampoline)
    // Tear down the scratch alloca; RBP still points at the callee-save frame.
    movq %rbp, %rsp
    CFI_DEF_CFA_REGISTER(rsp)
    // Exceptions possible.
    // TODO: use cmpq, needs direct encoding because of gas bug
    movq %gs:THREAD_EXCEPTION_OFFSET, %rbx
    test %rbx, %rbx
    jnz 2f
    // Tear down the callee-save frame.
    // Reload FPRs 1-7. XMM0 at 16(%rsp) is deliberately NOT reloaded: it holds the
    // (possibly floating-point) native return value and must not be clobbered here.
    movq 24(%rsp), %xmm1
    movq 32(%rsp), %xmm2
    movq 40(%rsp), %xmm3
    movq 48(%rsp), %xmm4
    movq 56(%rsp), %xmm5
    movq 64(%rsp), %xmm6
    movq 72(%rsp), %xmm7
    // Pop the 80 bytes reserved above for FPRs, padding and the ArtMethod*.
    addq LITERAL(80), %rsp
    CFI_ADJUST_CFA_OFFSET(-80)
    // Restore callee saves and GPR args, mirroring the pushes in the prologue.
    POP rcx  // Arg.
    POP rdx  // Arg.
    POP rbx  // Callee save.
    POP rbp  // Callee save.
    POP rsi  // Arg.
    POP r8   // Arg.
    POP r9   // Arg.
    POP r12  // Callee save.
    POP r13  // Callee save.
    POP r14  // Callee save.
    POP r15  // Callee save.
    // Mirror the integer result into XMM0 for the case of an FP return value.
    movq %rax, %xmm0
    ret
1:
    // Error before the native call: tear down the _whole_ scratch space; assumes the
    // SIRT is empty, cookie not valid etc.
    movq %rbp, %rsp
    CFI_DEF_CFA_REGISTER(rsp)
2: RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_generic_jni_trampoline
Andreas Gampe | 2da8823 | 2014-02-27 12:26:20 -0800 | [diff] [blame] | 852 | |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 853 | /* |
| 854 | * Called to bridge from the quick to interpreter ABI. On entry the arguments match those |
| 855 | * of a quick call: |
| 856 | * RDI = method being called / to bridge to. |
| 857 | * RSI, RDX, RCX, R8, R9 are arguments to that method. |
| 858 | */ |
| 859 | DEFINE_FUNCTION art_quick_to_interpreter_bridge |
| 860 | SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments. |
| 861 | movq %gs:THREAD_SELF_OFFSET, %rsi // RSI := Thread::Current() |
| 862 | movq %rsp, %rdx // RDX := sp |
| 863 | call PLT_SYMBOL(artQuickToInterpreterBridge) // (method, Thread*, SP) |
| 864 | RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case. |
| 865 | movq %rax, %xmm0 // Place return value also into floating point return value. |
| 866 | RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception |
| 867 | END_FUNCTION art_quick_to_interpreter_bridge |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 868 | |
    /*
     * Routines that intercept method calls and returns (instrumentation/profiling).
     * Not yet ported to x86-64.
     */
UNIMPLEMENTED art_quick_instrumentation_entry
UNIMPLEMENTED art_quick_instrumentation_exit

    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
UNIMPLEMENTED art_quick_deoptimize

// Intrinsified String/memory helpers; not yet ported to x86-64.
UNIMPLEMENTED art_quick_indexof
UNIMPLEMENTED art_quick_string_compareto
UNIMPLEMENTED art_quick_memcmp16