/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_compiler.h"

#include <algorithm>
#include <ios>
#include <memory>
#include <vector>
#include <fstream>

#include "art_method.h"
#include "base/arena_allocator.h"
#include "base/enums.h"
#include "base/logging.h"
#include "base/macros.h"
#include "memory_region.h"
#include "calling_convention.h"
#include "class_linker.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "driver/compiler_options.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "jni_env_ext.h"
#include "debug/dwarf/debug_frame_opcode_writer.h"
#include "utils/assembler.h"
#include "utils/jni_macro_assembler.h"
#include "utils/managed_register.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/mips/managed_register_mips.h"
#include "utils/mips64/managed_register_mips64.h"
#include "utils/x86/managed_register_x86.h"
#include "utils.h"
#include "thread.h"

#define __ jni_asm->
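// The "__" shorthand above: every "__ Foo(...)" statement below expands to
// "jni_asm->Foo(...)", i.e. it emits through the JNIMacroAssembler being built
// up in the enclosing function.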

namespace art {

using JniOptimizationFlags = Compiler::JniOptimizationFlags;

template <PointerSize kPointerSize>
static void CopyParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                          ManagedRuntimeCallingConvention* mr_conv,
                          JniCallingConvention* jni_conv,
                          size_t frame_size, size_t out_arg_size);
template <PointerSize kPointerSize>
static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                               JniCallingConvention* jni_conv,
                               ManagedRegister in_reg);

template <PointerSize kPointerSize>
static std::unique_ptr<JNIMacroAssembler<kPointerSize>> GetMacroAssembler(
    ArenaAllocator* arena, InstructionSet isa, const InstructionSetFeatures* features) {
  return JNIMacroAssembler<kPointerSize>::Create(arena, isa, features);
}

// Generate the JNI bridge for the given method, general contract:
// - Arguments are in the managed runtime format, either on stack or in
//   registers, a reference to the method object is supplied as part of this
//   convention.
//
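// As an overview (a rough sketch of steps 1-17 below, not literal generated code),
// the emitted stub for a regular native method:
//   - builds the frame (callee saves, Method*, return address) and sets up a
//     HandleScope, copying the reference arguments (plus the declaring class for
//     static methods) into it;
//   - stores the managed stack pointer into the Thread and calls JniMethodStart*,
//     saving the returned local-reference cookie;
//   - shuffles the arguments into the native calling convention, materializes the
//     jclass/JNIEnv* arguments, and calls the code at ArtMethod::EntryPointFromJni;
//   - calls JniMethodEnd* with the saved cookie, reloads the return value, polls for
//     pending exceptions, then removes the frame and returns.
// @CriticalNative methods skip the HandleScope and the JniMethodStart/End transitions.
//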
template <PointerSize kPointerSize>
static CompiledMethod* ArtJniCompileMethodInternal(CompilerDriver* driver,
                                                   uint32_t access_flags,
                                                   uint32_t method_idx,
                                                   const DexFile& dex_file,
                                                   JniOptimizationFlags optimization_flags) {
  const bool is_native = (access_flags & kAccNative) != 0;
  CHECK(is_native);
  const bool is_static = (access_flags & kAccStatic) != 0;
  const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
  const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
  InstructionSet instruction_set = driver->GetInstructionSet();
  const InstructionSetFeatures* instruction_set_features = driver->GetInstructionSetFeatures();

  // i.e. if the method was annotated with @FastNative
  const bool is_fast_native = (optimization_flags == Compiler::kFastNative);

  // i.e. if the method was annotated with @CriticalNative
  bool is_critical_native = (optimization_flags == Compiler::kCriticalNative);

  VLOG(jni) << "JniCompile: Method :: "
            << art::PrettyMethod(method_idx, dex_file, /* with signature */ true)
            << " :: access_flags = " << std::hex << access_flags << std::dec;

  if (UNLIKELY(is_fast_native)) {
    VLOG(jni) << "JniCompile: Fast native method detected :: "
              << art::PrettyMethod(method_idx, dex_file, /* with signature */ true);
  }

  if (UNLIKELY(is_critical_native)) {
    VLOG(jni) << "JniCompile: Critical native method detected :: "
              << art::PrettyMethod(method_idx, dex_file, /* with signature */ true);
  }

  if (kIsDebugBuild) {
    // Don't allow both @FastNative and @CriticalNative. They are mutually exclusive.
    if (UNLIKELY(is_fast_native && is_critical_native)) {
      LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative "
                 << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
    }

    // @CriticalNative - extra checks:
    // -- Don't allow virtual criticals
    // -- Don't allow synchronized criticals
    // -- Don't allow any objects as parameter or return value
    if (UNLIKELY(is_critical_native)) {
      CHECK(is_static)
          << "@CriticalNative functions cannot be virtual since that would "
          << "require passing a reference parameter (this), which is illegal "
          << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
      CHECK(!is_synchronized)
          << "@CriticalNative functions cannot be synchronized since that would "
          << "require passing a (class and/or this) reference parameter, which is illegal "
          << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
      for (size_t i = 0; i < strlen(shorty); ++i) {
        CHECK_NE(Primitive::kPrimNot, Primitive::GetType(shorty[i]))
            << "@CriticalNative methods' shorty types must not have illegal references "
            << art::PrettyMethod(method_idx, dex_file, /* with_signature */ true);
      }
    }
  }

  ArenaPool pool;
  ArenaAllocator arena(&pool);

  // Calling conventions used to iterate over parameters to method
  std::unique_ptr<JniCallingConvention> main_jni_conv =
      JniCallingConvention::Create(&arena,
                                   is_static,
                                   is_synchronized,
                                   is_critical_native,
                                   shorty,
                                   instruction_set);
  bool reference_return = main_jni_conv->IsReturnAReference();

  std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
      ManagedRuntimeCallingConvention::Create(
          &arena, is_static, is_synchronized, shorty, instruction_set));

  // Calling conventions to call into JNI method "end" possibly passing a returned reference, the
  // method and the current thread.
  const char* jni_end_shorty;
  if (reference_return && is_synchronized) {
    jni_end_shorty = "ILL";
  } else if (reference_return) {
    jni_end_shorty = "IL";
  } else if (is_synchronized) {
    jni_end_shorty = "VL";
  } else {
    jni_end_shorty = "V";
  }
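  // Note: each 'L' above is a reference argument that will be passed to the matching
  // JniMethodEnd* entrypoint in step 12 (the returned reference and/or the object that
  // was locked for a synchronized method); the current thread is appended when that
  // call is emitted.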

  std::unique_ptr<JniCallingConvention> end_jni_conv(
      JniCallingConvention::Create(&arena,
                                   is_static,
                                   is_synchronized,
                                   is_critical_native,
                                   jni_end_shorty,
                                   instruction_set));

  // Assembler that holds generated instructions
  std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
      GetMacroAssembler<kPointerSize>(&arena, instruction_set, instruction_set_features);
  jni_asm->cfi().SetEnabled(driver->GetCompilerOptions().GenerateAnyDebugInfo());

  // Offsets into data structures
  // TODO: if cross compiling these offsets are for the host not the target
  const Offset functions(OFFSETOF_MEMBER(JNIEnvExt, functions));
  const Offset monitor_enter(OFFSETOF_MEMBER(JNINativeInterface, MonitorEnter));
  const Offset monitor_exit(OFFSETOF_MEMBER(JNINativeInterface, MonitorExit));

  // 1. Build the frame saving all callee saves, Method*, and PC return address.
  const size_t frame_size(main_jni_conv->FrameSize());  // Excludes outgoing args.
  ArrayRef<const ManagedRegister> callee_save_regs = main_jni_conv->CalleeSaveRegisters();
  __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills());
  DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size));

  if (LIKELY(!is_critical_native)) {
    // NOTE: @CriticalNative methods don't have a HandleScope
    //       because they can't have any reference parameters or return values.

    // 2. Set up the HandleScope
    mr_conv->ResetIterator(FrameOffset(frame_size));
    main_jni_conv->ResetIterator(FrameOffset(0));
    __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(),
                             main_jni_conv->ReferenceCount(),
                             mr_conv->InterproceduralScratchRegister());

    __ CopyRawPtrFromThread(main_jni_conv->HandleScopeLinkOffset(),
                            Thread::TopHandleScopeOffset<kPointerSize>(),
                            mr_conv->InterproceduralScratchRegister());
    __ StoreStackOffsetToThread(Thread::TopHandleScopeOffset<kPointerSize>(),
                                main_jni_conv->HandleScopeOffset(),
                                mr_conv->InterproceduralScratchRegister());

    // 3. Place incoming reference arguments into handle scope
    main_jni_conv->Next();  // Skip JNIEnv*
    // 3.5. Create Class argument for static methods out of passed method
    if (is_static) {
      FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
      // Check handle scope offset is within frame
      CHECK_LT(handle_scope_offset.Uint32Value(), frame_size);
      // Note this LoadRef() doesn't need heap unpoisoning since it's from the ArtMethod.
      // Note this LoadRef() does not include read barrier. It will be handled below.
      //
      // scratchRegister = *method[DeclaringClassOffset()];
      __ LoadRef(main_jni_conv->InterproceduralScratchRegister(),
                 mr_conv->MethodRegister(), ArtMethod::DeclaringClassOffset(), false);
      __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false);
      // *handleScopeOffset = scratchRegister
      __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister());
      main_jni_conv->Next();  // in handle scope so move to next argument
    }
    // Place every reference into the handle scope (ignore other parameters).
    while (mr_conv->HasNext()) {
      CHECK(main_jni_conv->HasNext());
      bool ref_param = main_jni_conv->IsCurrentParamAReference();
      CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
      // References need placing in handle scope and the entry value passing
      if (ref_param) {
        // Compute handle scope entry, note null is placed in the handle scope but its boxed value
        // must be null.
        FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
        // Check handle scope offset is within frame and doesn't run into the saved segment state.
        CHECK_LT(handle_scope_offset.Uint32Value(), frame_size);
        CHECK_NE(handle_scope_offset.Uint32Value(),
                 main_jni_conv->SavedLocalReferenceCookieOffset().Uint32Value());
        bool input_in_reg = mr_conv->IsCurrentParamInRegister();
        bool input_on_stack = mr_conv->IsCurrentParamOnStack();
        CHECK(input_in_reg || input_on_stack);

        if (input_in_reg) {
          ManagedRegister in_reg = mr_conv->CurrentParamRegister();
          __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull());
          __ StoreRef(handle_scope_offset, in_reg);
        } else if (input_on_stack) {
          FrameOffset in_off = mr_conv->CurrentParamStackOffset();
          __ VerifyObject(in_off, mr_conv->IsCurrentArgPossiblyNull());
          __ CopyRef(handle_scope_offset, in_off,
                     mr_conv->InterproceduralScratchRegister());
        }
      }
      mr_conv->Next();
      main_jni_conv->Next();
    }

    // 4. Write out the end of the quick frames.
    __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>());

    // NOTE: @CriticalNative does not need to store the stack pointer to the thread
    //       because garbage collections are disabled within the execution of a
    //       @CriticalNative method.
    //       (TODO: We could probably disable it for @FastNative too).
  }  // if (!is_critical_native)

  // 5. Move frame down to allow space for out going args.
  const size_t main_out_arg_size = main_jni_conv->OutArgSize();
  size_t current_out_arg_size = main_out_arg_size;
  __ IncreaseFrameSize(main_out_arg_size);

  // Call the read barrier for the declaring class loaded from the method for a static call.
  // Note that we always have outgoing param space available for at least two params.
  if (kUseReadBarrier && is_static && !is_critical_native) {
    // XX: Why is this necessary only for the jclass? Why not for every single object ref?
    // Skip this for @CriticalNative because we didn't build a HandleScope to begin with.
    ThreadOffset<kPointerSize> read_barrier = QUICK_ENTRYPOINT_OFFSET(kPointerSize,
                                                                      pReadBarrierJni);
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    main_jni_conv->Next();  // Skip JNIEnv.
    FrameOffset class_handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    // Pass the handle for the class as the first argument.
    if (main_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
      __ CreateHandleScopeEntry(out_off, class_handle_scope_offset,
                                mr_conv->InterproceduralScratchRegister(),
                                false);
    } else {
      ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
      __ CreateHandleScopeEntry(out_reg, class_handle_scope_offset,
                                ManagedRegister::NoRegister(), false);
    }
    main_jni_conv->Next();
    // Pass the current thread as the second argument and call.
    if (main_jni_conv->IsCurrentParamInRegister()) {
      __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
      __ Call(main_jni_conv->CurrentParamRegister(),
              Offset(read_barrier),
              main_jni_conv->InterproceduralScratchRegister());
    } else {
      __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset(),
                          main_jni_conv->InterproceduralScratchRegister());
      __ CallFromThread(read_barrier, main_jni_conv->InterproceduralScratchRegister());
    }
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));  // Reset.
  }

  // 6. Call into appropriate JniMethodStart passing Thread* so that transition out of Runnable
  //    can occur. The result is the saved JNI local state that is restored by the exit call. We
  //    abuse the JNI calling convention here, that is guaranteed to support passing 2 pointer
  //    arguments.
  FrameOffset locked_object_handle_scope_offset(0xBEEFDEAD);
  if (LIKELY(!is_critical_native)) {
    // Skip this for @CriticalNative methods. They do not call JniMethodStart.
    ThreadOffset<kPointerSize> jni_start =
        is_synchronized
            ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStartSynchronized)
            : (is_fast_native
                   ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastStart)
                   : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStart));

    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    locked_object_handle_scope_offset = FrameOffset(0);
    if (is_synchronized) {
      // Pass object for locking.
      main_jni_conv->Next();  // Skip JNIEnv.
      locked_object_handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
      main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
      if (main_jni_conv->IsCurrentParamOnStack()) {
        FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
        __ CreateHandleScopeEntry(out_off, locked_object_handle_scope_offset,
                                  mr_conv->InterproceduralScratchRegister(), false);
      } else {
        ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
        __ CreateHandleScopeEntry(out_reg, locked_object_handle_scope_offset,
                                  ManagedRegister::NoRegister(), false);
      }
      main_jni_conv->Next();
    }
    if (main_jni_conv->IsCurrentParamInRegister()) {
      __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
      __ Call(main_jni_conv->CurrentParamRegister(),
              Offset(jni_start),
              main_jni_conv->InterproceduralScratchRegister());
    } else {
      __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset(),
                          main_jni_conv->InterproceduralScratchRegister());
      __ CallFromThread(jni_start, main_jni_conv->InterproceduralScratchRegister());
    }
    if (is_synchronized) {  // Check for exceptions from monitor enter.
      __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), main_out_arg_size);
    }
  }

  // Store into stack_frame[saved_cookie_offset] the return value of JniMethodStart.
  FrameOffset saved_cookie_offset(
      FrameOffset(0xDEADBEEFu));  // @CriticalNative - use obviously bad value for debugging
  if (LIKELY(!is_critical_native)) {
    saved_cookie_offset = main_jni_conv->SavedLocalReferenceCookieOffset();
    __ Store(saved_cookie_offset, main_jni_conv->IntReturnRegister(), 4 /* sizeof cookie */);
  }

  // 7. Iterate over arguments placing values from managed calling convention in
  //    to the convention required for a native call (shuffling). For references
  //    place an index/pointer to the reference after checking whether it is
  //    null (which must be encoded as null).
  //    Note: we do this prior to materializing the JNIEnv* and static's jclass to
  //    give as many free registers for the shuffle as possible.
  mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
  uint32_t args_count = 0;
  while (mr_conv->HasNext()) {
    args_count++;
    mr_conv->Next();
  }

  // Do a backward pass over arguments, so that the generated code will be "mov
  // R2, R3; mov R1, R2" instead of "mov R1, R2; mov R2, R3."
  // TODO: A reverse iterator to improve readability.
  for (uint32_t i = 0; i < args_count; ++i) {
    mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));

    // Skip the extra JNI parameters for now.
    if (LIKELY(!is_critical_native)) {
      main_jni_conv->Next();    // Skip JNIEnv*.
      if (is_static) {
        main_jni_conv->Next();  // Skip Class for now.
      }
    }
    // Skip to the argument we're interested in.
    for (uint32_t j = 0; j < args_count - i - 1; ++j) {
      mr_conv->Next();
      main_jni_conv->Next();
    }
    CopyParameter(jni_asm.get(), mr_conv.get(), main_jni_conv.get(), frame_size, main_out_arg_size);
  }
  if (is_static && !is_critical_native) {
    // Create argument for Class
    mr_conv->ResetIterator(FrameOffset(frame_size + main_out_arg_size));
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    main_jni_conv->Next();  // Skip JNIEnv*
    FrameOffset handle_scope_offset = main_jni_conv->CurrentParamHandleScopeEntryOffset();
    if (main_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
      __ CreateHandleScopeEntry(out_off, handle_scope_offset,
                                mr_conv->InterproceduralScratchRegister(),
                                false);
    } else {
      ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
      __ CreateHandleScopeEntry(out_reg, handle_scope_offset,
                                ManagedRegister::NoRegister(), false);
    }
  }

  // Set the iterator back to the incoming Method*.
  main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
  if (LIKELY(!is_critical_native)) {
    // 8. Create 1st argument, the JNI environment ptr.
    //    Register that will hold local indirect reference table
    if (main_jni_conv->IsCurrentParamInRegister()) {
      ManagedRegister jni_env = main_jni_conv->CurrentParamRegister();
      DCHECK(!jni_env.Equals(main_jni_conv->InterproceduralScratchRegister()));
      __ LoadRawPtrFromThread(jni_env, Thread::JniEnvOffset<kPointerSize>());
    } else {
      FrameOffset jni_env = main_jni_conv->CurrentParamStackOffset();
      __ CopyRawPtrFromThread(jni_env,
                              Thread::JniEnvOffset<kPointerSize>(),
                              main_jni_conv->InterproceduralScratchRegister());
    }
  }

  // 9. Plant call to native code associated with method.
  MemberOffset jni_entrypoint_offset =
      ArtMethod::EntryPointFromJniOffset(InstructionSetPointerSize(instruction_set));
  // FIXME: Not sure if MethodStackOffset will work here. What does it even do?
  __ Call(main_jni_conv->MethodStackOffset(),
          jni_entrypoint_offset,
          // XX: Why not the jni conv scratch register?
          mr_conv->InterproceduralScratchRegister());

  // 10. Fix differences in result widths.
  if (main_jni_conv->RequiresSmallResultTypeExtension()) {
    if (main_jni_conv->GetReturnType() == Primitive::kPrimByte ||
        main_jni_conv->GetReturnType() == Primitive::kPrimShort) {
      __ SignExtend(main_jni_conv->ReturnRegister(),
                    Primitive::ComponentSize(main_jni_conv->GetReturnType()));
    } else if (main_jni_conv->GetReturnType() == Primitive::kPrimBoolean ||
               main_jni_conv->GetReturnType() == Primitive::kPrimChar) {
      __ ZeroExtend(main_jni_conv->ReturnRegister(),
                    Primitive::ComponentSize(main_jni_conv->GetReturnType()));
    }
  }

  // 11. Process return value
  FrameOffset return_save_location = main_jni_conv->ReturnValueSaveLocation();
  if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) {
    if (LIKELY(!is_critical_native)) {
      // For normal JNI, store the return value on the stack because the call to
      // JniMethodEnd will clobber the return value. It will be restored in (13).
      if ((instruction_set == kMips || instruction_set == kMips64) &&
          main_jni_conv->GetReturnType() == Primitive::kPrimDouble &&
          return_save_location.Uint32Value() % 8 != 0) {
        // Ensure doubles are 8-byte aligned for MIPS
        return_save_location = FrameOffset(return_save_location.Uint32Value()
                                               + static_cast<size_t>(kMipsPointerSize));
        // TODO: refactor this into the JniCallingConvention code
        //       as a return value alignment requirement.
      }
      CHECK_LT(return_save_location.Uint32Value(), frame_size + main_out_arg_size);
      __ Store(return_save_location,
               main_jni_conv->ReturnRegister(),
               main_jni_conv->SizeOfReturnValue());
    } else {
      // For @CriticalNative only,
      // move the JNI return register into the managed return register (if they don't match).
      ManagedRegister jni_return_reg = main_jni_conv->ReturnRegister();
      ManagedRegister mr_return_reg = mr_conv->ReturnRegister();

      // Check if the JNI return register matches the managed return register.
      // If they differ, only then do we have to do anything about it.
      // Otherwise the return value is already in the right place when we return.
      if (!jni_return_reg.Equals(mr_return_reg)) {
        // This is typically only necessary on ARM32 due to native being softfloat
        // while managed is hardfloat.
        // -- For example VMOV {r0, r1} -> D0; VMOV r0 -> S0.
        __ Move(mr_return_reg, jni_return_reg, main_jni_conv->SizeOfReturnValue());
      } else if (jni_return_reg.IsNoRegister() && mr_return_reg.IsNoRegister()) {
        // Sanity check: If the return value is passed on the stack for some reason,
        // then make sure the size matches.
        CHECK_EQ(main_jni_conv->SizeOfReturnValue(), mr_conv->SizeOfReturnValue());
      }
    }
  }

  // Increase frame size for out args if needed by the end_jni_conv.
  const size_t end_out_arg_size = end_jni_conv->OutArgSize();
  if (end_out_arg_size > current_out_arg_size) {
    size_t out_arg_size_diff = end_out_arg_size - current_out_arg_size;
    current_out_arg_size = end_out_arg_size;
    // TODO: This is redundant for @CriticalNative but we need to
    //       conditionally do __DecreaseFrameSize below.
    __ IncreaseFrameSize(out_arg_size_diff);
    saved_cookie_offset = FrameOffset(saved_cookie_offset.SizeValue() + out_arg_size_diff);
    locked_object_handle_scope_offset =
        FrameOffset(locked_object_handle_scope_offset.SizeValue() + out_arg_size_diff);
    return_save_location = FrameOffset(return_save_location.SizeValue() + out_arg_size_diff);
  }
  end_jni_conv->ResetIterator(FrameOffset(end_out_arg_size));

  if (LIKELY(!is_critical_native)) {
    // 12. Call JniMethodEnd
    ThreadOffset<kPointerSize> jni_end(-1);
    if (reference_return) {
      // Pass result.
      jni_end = is_synchronized
          ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReferenceSynchronized)
          : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReference);
      SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister());
      end_jni_conv->Next();
    } else {
      jni_end = is_synchronized
          ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndSynchronized)
          : (is_fast_native
                 ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEnd)
                 : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEnd));
    }
    // Pass saved local reference state.
    if (end_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
      __ Copy(out_off, saved_cookie_offset, end_jni_conv->InterproceduralScratchRegister(), 4);
    } else {
      ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
      __ Load(out_reg, saved_cookie_offset, 4);
    }
    end_jni_conv->Next();
    if (is_synchronized) {
      // Pass object for unlocking.
      if (end_jni_conv->IsCurrentParamOnStack()) {
        FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
        __ CreateHandleScopeEntry(out_off, locked_object_handle_scope_offset,
                                  end_jni_conv->InterproceduralScratchRegister(),
                                  false);
      } else {
        ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
        __ CreateHandleScopeEntry(out_reg, locked_object_handle_scope_offset,
                                  ManagedRegister::NoRegister(), false);
      }
      end_jni_conv->Next();
    }
    if (end_jni_conv->IsCurrentParamInRegister()) {
      __ GetCurrentThread(end_jni_conv->CurrentParamRegister());
      __ Call(end_jni_conv->CurrentParamRegister(),
              Offset(jni_end),
              end_jni_conv->InterproceduralScratchRegister());
    } else {
      __ GetCurrentThread(end_jni_conv->CurrentParamStackOffset(),
                          end_jni_conv->InterproceduralScratchRegister());
      __ CallFromThread(jni_end, end_jni_conv->InterproceduralScratchRegister());
    }

    // 13. Reload return value
    if (main_jni_conv->SizeOfReturnValue() != 0 && !reference_return) {
      __ Load(mr_conv->ReturnRegister(), return_save_location, mr_conv->SizeOfReturnValue());
      // NIT: If it's @CriticalNative then we actually only need to do this IF
      //      the calling convention's native return register doesn't match the managed
      //      convention's return register.
    }
  }  // if (!is_critical_native)

  // 14. Move frame up now we're done with the out arg space.
  __ DecreaseFrameSize(current_out_arg_size);

  // 15. Process pending exceptions from JNI call or monitor exit.
  __ ExceptionPoll(main_jni_conv->InterproceduralScratchRegister(), 0 /* stack_adjust */);

  // 16. Remove activation - need to restore callee save registers since the GC may have changed
  //     them.
  DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size));
  __ RemoveFrame(frame_size, callee_save_regs);
  DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(frame_size));

  // 17. Finalize code generation
  __ FinalizeCode();
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);

  return CompiledMethod::SwapAllocCompiledMethod(driver,
                                                 instruction_set,
                                                 ArrayRef<const uint8_t>(managed_code),
                                                 frame_size,
                                                 main_jni_conv->CoreSpillMask(),
                                                 main_jni_conv->FpSpillMask(),
                                                 ArrayRef<const SrcMapElem>(),
                                                 ArrayRef<const uint8_t>(),  // vmap_table.
                                                 ArrayRef<const uint8_t>(*jni_asm->cfi().data()),
                                                 ArrayRef<const LinkerPatch>());
}

// Copy a single parameter from the managed to the JNI calling convention.
template <PointerSize kPointerSize>
static void CopyParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                          ManagedRuntimeCallingConvention* mr_conv,
                          JniCallingConvention* jni_conv,
                          size_t frame_size,
                          size_t out_arg_size) {
  bool input_in_reg = mr_conv->IsCurrentParamInRegister();
  bool output_in_reg = jni_conv->IsCurrentParamInRegister();
  FrameOffset handle_scope_offset(0);
  bool null_allowed = false;
  bool ref_param = jni_conv->IsCurrentParamAReference();
  CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
  // input may be in register, on stack or both - but not none!
  CHECK(input_in_reg || mr_conv->IsCurrentParamOnStack());
  if (output_in_reg) {  // output shouldn't straddle registers and stack
    CHECK(!jni_conv->IsCurrentParamOnStack());
  } else {
    CHECK(jni_conv->IsCurrentParamOnStack());
  }
  // References need placing in handle scope and the entry address passing.
  if (ref_param) {
    null_allowed = mr_conv->IsCurrentArgPossiblyNull();
    // Compute handle scope offset. Note null is placed in the handle scope but the jobject
    // passed to the native code must be null (not a pointer into the handle scope
    // as with regular references).
    handle_scope_offset = jni_conv->CurrentParamHandleScopeEntryOffset();
    // Check handle scope offset is within frame.
    CHECK_LT(handle_scope_offset.Uint32Value(), (frame_size + out_arg_size));
  }
  if (input_in_reg && output_in_reg) {
    ManagedRegister in_reg = mr_conv->CurrentParamRegister();
    ManagedRegister out_reg = jni_conv->CurrentParamRegister();
    if (ref_param) {
      __ CreateHandleScopeEntry(out_reg, handle_scope_offset, in_reg, null_allowed);
    } else {
      if (!mr_conv->IsCurrentParamOnStack()) {
        // regular non-straddling move
        __ Move(out_reg, in_reg, mr_conv->CurrentParamSize());
      } else {
        UNIMPLEMENTED(FATAL);  // we currently don't expect to see this case
      }
    }
  } else if (!input_in_reg && !output_in_reg) {
    FrameOffset out_off = jni_conv->CurrentParamStackOffset();
    if (ref_param) {
      __ CreateHandleScopeEntry(out_off, handle_scope_offset,
                                mr_conv->InterproceduralScratchRegister(),
                                null_allowed);
    } else {
      FrameOffset in_off = mr_conv->CurrentParamStackOffset();
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      __ Copy(out_off, in_off, mr_conv->InterproceduralScratchRegister(), param_size);
    }
  } else if (!input_in_reg && output_in_reg) {
    FrameOffset in_off = mr_conv->CurrentParamStackOffset();
    ManagedRegister out_reg = jni_conv->CurrentParamRegister();
    // Check that incoming stack arguments are above the current stack frame.
    CHECK_GT(in_off.Uint32Value(), frame_size);
    if (ref_param) {
      __ CreateHandleScopeEntry(out_reg, handle_scope_offset, ManagedRegister::NoRegister(),
                                null_allowed);
    } else {
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      __ Load(out_reg, in_off, param_size);
    }
  } else {
    CHECK(input_in_reg && !output_in_reg);
    ManagedRegister in_reg = mr_conv->CurrentParamRegister();
    FrameOffset out_off = jni_conv->CurrentParamStackOffset();
    // Check outgoing argument is within frame
    CHECK_LT(out_off.Uint32Value(), frame_size);
    if (ref_param) {
      // TODO: recycle value in in_reg rather than reload from handle scope
      __ CreateHandleScopeEntry(out_off, handle_scope_offset,
                                mr_conv->InterproceduralScratchRegister(),
                                null_allowed);
    } else {
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      if (!mr_conv->IsCurrentParamOnStack()) {
        // regular non-straddling store
        __ Store(out_off, in_reg, param_size);
      } else {
        // store where input straddles registers and stack
        CHECK_EQ(param_size, 8u);
        FrameOffset in_off = mr_conv->CurrentParamStackOffset();
        __ StoreSpanning(out_off, in_reg, in_off, mr_conv->InterproceduralScratchRegister());
      }
    }
  }
}

template <PointerSize kPointerSize>
static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                               JniCallingConvention* jni_conv,
                               ManagedRegister in_reg) {
  if (jni_conv->IsCurrentParamOnStack()) {
    FrameOffset dest = jni_conv->CurrentParamStackOffset();
    __ StoreRawPtr(dest, in_reg);
  } else {
    if (!jni_conv->CurrentParamRegister().Equals(in_reg)) {
      __ Move(jni_conv->CurrentParamRegister(), in_reg, jni_conv->CurrentParamSize());
    }
  }
}

CompiledMethod* ArtQuickJniCompileMethod(CompilerDriver* compiler,
                                         uint32_t access_flags,
                                         uint32_t method_idx,
                                         const DexFile& dex_file,
                                         Compiler::JniOptimizationFlags optimization_flags) {
  if (Is64BitInstructionSet(compiler->GetInstructionSet())) {
    return ArtJniCompileMethodInternal<PointerSize::k64>(
        compiler, access_flags, method_idx, dex_file, optimization_flags);
  } else {
    return ArtJniCompileMethodInternal<PointerSize::k32>(
        compiler, access_flags, method_idx, dex_file, optimization_flags);
  }
}

}  // namespace art