/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Mterp entry point and support functions.
 */
#include "mterp.h"

#include "base/quasi_atomic.h"
#include "debugger.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "interpreter/interpreter_common.h"
#include "interpreter/interpreter_intrinsics.h"
#include "interpreter/shadow_frame-inl.h"

namespace art {
namespace interpreter {
/*
 * Verify some constants used by the mterp interpreter.
 */
void CheckMterpAsmConstants() {
  /*
   * If we're using computed goto instruction transitions, make sure
   * none of the handlers overflows the 128-byte limit.  This won't tell
   * us which one did, but if any one is too big the total size will
   * overflow.
   */
  const int width = 128;
  int interp_size = (uintptr_t) artMterpAsmInstructionEnd -
                    (uintptr_t) artMterpAsmInstructionStart;
  if ((interp_size == 0) || (interp_size != (art::kNumPackedOpcodes * width))) {
    LOG(FATAL) << "ERROR: unexpected asm interp size " << interp_size
               << " (did an instruction handler exceed " << width << " bytes?)";
  }
}
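
// A worked example of the check above (sizes assumed for illustration): with
// kNumPackedOpcodes at its usual value of 256, and handlers limited to 128
// bytes each, the only acceptable interp_size is 256 * 128 = 32768 bytes.
// A single 130-byte handler pushes the total past that and trips the
// LOG(FATAL), although the check cannot say which handler overflowed.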

void InitMterpTls(Thread* self) {
  self->SetMterpDefaultIBase(artMterpAsmInstructionStart);
  self->SetMterpAltIBase(artMterpAsmAltInstructionStart);
  self->SetMterpCurrentIBase((kTraceExecutionEnabled || kTestExportPC) ?
                             artMterpAsmAltInstructionStart :
                             artMterpAsmInstructionStart);
}

/*
 * Find the matching case.  Returns the offset to the handler instructions.
 *
 * Returns 3 if we don't find a match (it's the size of the sparse-switch
 * instruction).
 */
extern "C" ssize_t MterpDoSparseSwitch(const uint16_t* switchData, int32_t testVal) {
  const int kInstrLen = 3;
  uint16_t size;
  const int32_t* keys;
  const int32_t* entries;

  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */

  uint16_t signature = *switchData++;
  DCHECK_EQ(signature, static_cast<uint16_t>(art::Instruction::kSparseSwitchSignature));

  size = *switchData++;

  /* The keys are guaranteed to be aligned on a 32-bit boundary;
   * we can treat them as a native int array.
   */
  keys = reinterpret_cast<const int32_t*>(switchData);

  /* The entries are guaranteed to be aligned on a 32-bit boundary;
   * we can treat them as a native int array.
   */
  entries = keys + size;

  /*
   * Binary-search through the array of keys, which are guaranteed to
   * be sorted low-to-high.
   */
  int lo = 0;
  int hi = size - 1;
  while (lo <= hi) {
    int mid = (lo + hi) >> 1;

    int32_t foundVal = keys[mid];
    if (testVal < foundVal) {
      hi = mid - 1;
    } else if (testVal > foundVal) {
      lo = mid + 1;
    } else {
      return entries[mid];
    }
  }
  return kInstrLen;
}
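
/*
 * Illustrative payload (values invented for this sketch; a little-endian host
 * is assumed): a sparse switch over cases {-1: +10, 100: +20, 1000: +30} is
 * encoded as the 16-bit code units
 *   0x0200, 0x0003,                                      // ident, size = 3
 *   0xffff, 0xffff,  0x0064, 0x0000,  0x03e8, 0x0000,    // keys -1, 100, 1000
 *   0x000a, 0x0000,  0x0014, 0x0000,  0x001e, 0x0000,    // targets +10, +20, +30
 * With that payload, MterpDoSparseSwitch(payload, 100) returns +20, while any
 * unmatched testVal falls through to kInstrLen (3).
 */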

extern "C" ssize_t MterpDoPackedSwitch(const uint16_t* switchData, int32_t testVal) {
  const int kInstrLen = 3;

  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t signature = *switchData++;
  DCHECK_EQ(signature, static_cast<uint16_t>(art::Instruction::kPackedSwitchSignature));

  uint16_t size = *switchData++;

  int32_t firstKey = *switchData++;
  firstKey |= (*switchData++) << 16;

  int index = testVal - firstKey;
  if (index < 0 || index >= size) {
    return kInstrLen;
  }

  /*
   * The entries are guaranteed to be aligned on a 32-bit boundary;
   * we can treat them as a native int array.
   */
  const int32_t* entries = reinterpret_cast<const int32_t*>(switchData);
  return entries[index];
}
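
/*
 * Illustrative payload (again invented, little-endian host assumed): a packed
 * switch over cases {5: +8, 6: +12, 7: +16} is encoded as the 16-bit code units
 *   0x0100, 0x0003,                                      // ident, size = 3
 *   0x0005, 0x0000,                                      // first_key = 5
 *   0x0008, 0x0000,  0x000c, 0x0000,  0x0010, 0x0000,    // targets +8, +12, +16
 * MterpDoPackedSwitch(payload, 6) computes index 6 - 5 = 1 and returns +12;
 * a testVal of 4 or 8 is out of range and returns kInstrLen (3).
 */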

extern "C" size_t MterpShouldSwitchInterpreters()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Runtime* const runtime = Runtime::Current();
  const instrumentation::Instrumentation* const instrumentation = runtime->GetInstrumentation();
  return instrumentation->NonJitProfilingActive() ||
      Dbg::IsDebuggerActive() ||
      // An async exception has been thrown. We need to go to the switch interpreter; mterp doesn't
      // know how to deal with these, so we could end up never handling one if we are in an
      // infinite loop. Since this can be called in a tight loop and getting the current thread
      // requires a TLS read, we first check a short-circuit runtime flag that is only set when
      // something tries to raise an async exception. This keeps the function fast in the common
      // case where no async exception has ever been sent. We don't need to worry about
      // synchronization on the runtime flag, since it is only set in a checkpoint, which either
      // takes place on the current thread or acts as a synchronization point.
      (UNLIKELY(runtime->AreAsyncExceptionsThrown()) &&
       Thread::Current()->IsAsyncExceptionPending());
}

extern "C" size_t MterpInvokeVirtual(Thread* self,
                                     ShadowFrame* shadow_frame,
                                     uint16_t* dex_pc_ptr,
                                     uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoFastInvoke<kVirtual>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeSuper(Thread* self,
                                   ShadowFrame* shadow_frame,
                                   uint16_t* dex_pc_ptr,
                                   uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kSuper, false, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeInterface(Thread* self,
                                       ShadowFrame* shadow_frame,
                                       uint16_t* dex_pc_ptr,
                                       uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kInterface, false, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeDirect(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    uint16_t* dex_pc_ptr,
                                    uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoFastInvoke<kDirect>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeStatic(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    uint16_t* dex_pc_ptr,
                                    uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoFastInvoke<kStatic>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeCustom(Thread* self,
                                    ShadowFrame* shadow_frame,
                                    uint16_t* dex_pc_ptr,
                                    uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvokeCustom<false /* is_range */>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokePolymorphic(Thread* self,
                                         ShadowFrame* shadow_frame,
                                         uint16_t* dex_pc_ptr,
                                         uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvokePolymorphic<false /* is_range */>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeVirtualRange(Thread* self,
                                          ShadowFrame* shadow_frame,
                                          uint16_t* dex_pc_ptr,
                                          uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kVirtual, true, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeSuperRange(Thread* self,
                                        ShadowFrame* shadow_frame,
                                        uint16_t* dex_pc_ptr,
                                        uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kSuper, true, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeInterfaceRange(Thread* self,
                                            ShadowFrame* shadow_frame,
                                            uint16_t* dex_pc_ptr,
                                            uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kInterface, true, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeDirectRange(Thread* self,
                                         ShadowFrame* shadow_frame,
                                         uint16_t* dex_pc_ptr,
                                         uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kDirect, true, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeStaticRange(Thread* self,
                                         ShadowFrame* shadow_frame,
                                         uint16_t* dex_pc_ptr,
                                         uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvoke<kStatic, true, false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeCustomRange(Thread* self,
                                         ShadowFrame* shadow_frame,
                                         uint16_t* dex_pc_ptr,
                                         uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvokeCustom<true /* is_range */>(self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokePolymorphicRange(Thread* self,
                                              ShadowFrame* shadow_frame,
                                              uint16_t* dex_pc_ptr,
                                              uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvokePolymorphic<true /* is_range */>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeVirtualQuick(Thread* self,
                                          ShadowFrame* shadow_frame,
                                          uint16_t* dex_pc_ptr,
                                          uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  const uint32_t vregC = inst->VRegC_35c();
  const uint32_t vtable_idx = inst->VRegB_35c();
  ObjPtr<mirror::Object> const receiver = shadow_frame->GetVRegReference(vregC);
  if (receiver != nullptr) {
    ArtMethod* const called_method = receiver->GetClass()->GetEmbeddedVTableEntry(
        vtable_idx, kRuntimePointerSize);
    if ((called_method != nullptr) && called_method->IsIntrinsic()) {
      if (MterpHandleIntrinsic(shadow_frame, called_method, inst, inst_data, result_register)) {
        jit::Jit* jit = Runtime::Current()->GetJit();
        if (jit != nullptr) {
          jit->InvokeVirtualOrInterface(
              receiver, shadow_frame->GetMethod(), shadow_frame->GetDexPC(), called_method);
        }
        return !self->IsExceptionPending();
      }
    }
  }
  return DoInvokeVirtualQuick<false>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" size_t MterpInvokeVirtualQuickRange(Thread* self,
                                               ShadowFrame* shadow_frame,
                                               uint16_t* dex_pc_ptr,
                                               uint16_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue* result_register = shadow_frame->GetResultRegister();
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoInvokeVirtualQuick<true>(
      self, *shadow_frame, inst, inst_data, result_register);
}

extern "C" void MterpThreadFenceForConstructor() {
  QuasiAtomic::ThreadFenceForConstructor();
}

extern "C" size_t MterpConstString(uint32_t index,
                                   uint32_t tgt_vreg,
                                   ShadowFrame* shadow_frame,
                                   Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::String> s = ResolveString(self, *shadow_frame, dex::StringIndex(index));
  if (UNLIKELY(s == nullptr)) {
    return true;
  }
  shadow_frame->SetVRegReference(tgt_vreg, s);
  return false;
}

extern "C" size_t MterpConstClass(uint32_t index,
                                  uint32_t tgt_vreg,
                                  ShadowFrame* shadow_frame,
                                  Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(index),
                                                   shadow_frame->GetMethod(),
                                                   self,
                                                   /* can_run_clinit */ false,
                                                   /* verify_access */ false);
  if (UNLIKELY(c == nullptr)) {
    return true;
  }
  shadow_frame->SetVRegReference(tgt_vreg, c);
  return false;
}

extern "C" size_t MterpConstMethodHandle(uint32_t index,
                                         uint32_t tgt_vreg,
                                         ShadowFrame* shadow_frame,
                                         Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::MethodHandle> mh = ResolveMethodHandle(self, index, shadow_frame->GetMethod());
  if (UNLIKELY(mh == nullptr)) {
    return true;
  }
  shadow_frame->SetVRegReference(tgt_vreg, mh);
  return false;
}

extern "C" size_t MterpConstMethodType(uint32_t index,
                                       uint32_t tgt_vreg,
                                       ShadowFrame* shadow_frame,
                                       Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::MethodType> mt =
      ResolveMethodType(self, dex::ProtoIndex(index), shadow_frame->GetMethod());
  if (UNLIKELY(mt == nullptr)) {
    return true;
  }
  shadow_frame->SetVRegReference(tgt_vreg, mt);
  return false;
}

extern "C" size_t MterpCheckCast(uint32_t index,
                                 StackReference<mirror::Object>* vreg_addr,
                                 art::ArtMethod* method,
                                 Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(index),
                                                   method,
                                                   self,
                                                   false,
                                                   false);
  if (UNLIKELY(c == nullptr)) {
    return true;
  }
  // Must load obj from vreg following ResolveVerifyAndClinit due to moving gc.
  mirror::Object* obj = vreg_addr->AsMirrorPtr();
  if (UNLIKELY(obj != nullptr && !obj->InstanceOf(c))) {
    ThrowClassCastException(c, obj->GetClass());
    return true;
  }
  return false;
}

extern "C" size_t MterpInstanceOf(uint32_t index,
                                  StackReference<mirror::Object>* vreg_addr,
                                  art::ArtMethod* method,
                                  Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(index),
                                                   method,
                                                   self,
                                                   false,
                                                   false);
  if (UNLIKELY(c == nullptr)) {
    return false;  // Caller will check for pending exception.  Return value unimportant.
  }
  // Must load obj from vreg following ResolveVerifyAndClinit due to moving gc.
  mirror::Object* obj = vreg_addr->AsMirrorPtr();
  return (obj != nullptr) && obj->InstanceOf(c);
}

extern "C" size_t MterpFillArrayData(mirror::Object* obj, const Instruction::ArrayDataPayload* payload)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return FillArrayData(obj, payload);
}

extern "C" size_t MterpNewInstance(ShadowFrame* shadow_frame, Thread* self, uint32_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  mirror::Object* obj = nullptr;
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(inst->VRegB_21c()),
                                                   shadow_frame->GetMethod(),
                                                   self,
                                                   /* can_run_clinit */ false,
                                                   /* verify_access */ false);
  if (LIKELY(c != nullptr)) {
    if (UNLIKELY(c->IsStringClass())) {
      gc::AllocatorType allocator_type = Runtime::Current()->GetHeap()->GetCurrentAllocator();
      obj = mirror::String::AllocEmptyString<true>(self, allocator_type);
    } else {
      obj = AllocObjectFromCode<true>(c.Ptr(),
                                      self,
                                      Runtime::Current()->GetHeap()->GetCurrentAllocator());
    }
  }
  if (UNLIKELY(obj == nullptr)) {
    return false;
  }
  obj->GetClass()->AssertInitializedOrInitializingInThread(self);
  shadow_frame->SetVRegReference(inst->VRegA_21c(inst_data), obj);
  return true;
}

extern "C" size_t MterpIputObjectQuick(ShadowFrame* shadow_frame,
                                       uint16_t* dex_pc_ptr,
                                       uint32_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoIPutQuick<Primitive::kPrimNot, false>(*shadow_frame, inst, inst_data);
}

extern "C" size_t MterpAputObject(ShadowFrame* shadow_frame,
                                  uint16_t* dex_pc_ptr,
                                  uint32_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  mirror::Object* a = shadow_frame->GetVRegReference(inst->VRegB_23x());
  if (UNLIKELY(a == nullptr)) {
    return false;
  }
  int32_t index = shadow_frame->GetVReg(inst->VRegC_23x());
  mirror::Object* val = shadow_frame->GetVRegReference(inst->VRegA_23x(inst_data));
  mirror::ObjectArray<mirror::Object>* array = a->AsObjectArray<mirror::Object>();
  if (array->CheckIsValidIndex(index) && array->CheckAssignable(val)) {
    array->SetWithoutChecks<false>(index, val);
    return true;
  }
  return false;
}

extern "C" size_t MterpFilledNewArray(ShadowFrame* shadow_frame,
                                      uint16_t* dex_pc_ptr,
                                      Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoFilledNewArray<false, false, false>(inst, *shadow_frame, self,
                                               shadow_frame->GetResultRegister());
}

extern "C" size_t MterpFilledNewArrayRange(ShadowFrame* shadow_frame,
                                           uint16_t* dex_pc_ptr,
                                           Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  return DoFilledNewArray<true, false, false>(inst, *shadow_frame, self,
                                              shadow_frame->GetResultRegister());
}

extern "C" size_t MterpNewArray(ShadowFrame* shadow_frame,
                                uint16_t* dex_pc_ptr,
                                uint32_t inst_data, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  int32_t length = shadow_frame->GetVReg(inst->VRegB_22c(inst_data));
  ObjPtr<mirror::Object> obj = AllocArrayFromCode<false, true>(
      dex::TypeIndex(inst->VRegC_22c()), length, shadow_frame->GetMethod(), self,
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(obj == nullptr)) {
    return false;
  }
  shadow_frame->SetVRegReference(inst->VRegA_22c(inst_data), obj);
  return true;
}

extern "C" size_t MterpHandleException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(self->IsExceptionPending());
  const instrumentation::Instrumentation* const instrumentation =
      Runtime::Current()->GetInstrumentation();
  return MoveToExceptionHandler(self, *shadow_frame, instrumentation);
}

extern "C" void MterpCheckBefore(Thread* self, ShadowFrame* shadow_frame, uint16_t* dex_pc_ptr)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  uint16_t inst_data = inst->Fetch16(0);
  if (inst->Opcode(inst_data) == Instruction::MOVE_EXCEPTION) {
    self->AssertPendingException();
  } else {
    self->AssertNoPendingException();
  }
  if (kTraceExecutionEnabled) {
    uint32_t dex_pc = dex_pc_ptr - shadow_frame->GetDexInstructions();
    TraceExecution(*shadow_frame, inst, dex_pc);
  }
  if (kTestExportPC) {
    // Save invalid dex pc to force segfault if improperly used.
    shadow_frame->SetDexPCPtr(reinterpret_cast<uint16_t*>(kExportPCPoison));
  }
}

extern "C" void MterpLogDivideByZeroException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "DivideByZero: " << inst->Opcode(inst_data);
}

extern "C" void MterpLogArrayIndexException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "ArrayIndex: " << inst->Opcode(inst_data);
}

extern "C" void MterpLogNegativeArraySizeException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "NegativeArraySize: " << inst->Opcode(inst_data);
}

extern "C" void MterpLogNoSuchMethodException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "NoSuchMethod: " << inst->Opcode(inst_data);
}

extern "C" void MterpLogExceptionThrownException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "ExceptionThrown: " << inst->Opcode(inst_data);
}

extern "C" void MterpLogNullObjectException(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "NullObject: " << inst->Opcode(inst_data);
}

extern "C" void MterpLogFallback(Thread* self, ShadowFrame* shadow_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "Fallback: " << inst->Opcode(inst_data) << ", Exception Pending?: "
            << self->IsExceptionPending();
}

extern "C" void MterpLogOSR(Thread* self, ShadowFrame* shadow_frame, int32_t offset)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  LOG(INFO) << "OSR: " << inst->Opcode(inst_data) << ", offset = " << offset;
}

extern "C" void MterpLogSuspendFallback(Thread* self, ShadowFrame* shadow_frame, uint32_t flags)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  UNUSED(self);
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  uint16_t inst_data = inst->Fetch16(0);
  if (flags & kCheckpointRequest) {
    LOG(INFO) << "Checkpoint fallback: " << inst->Opcode(inst_data);
  } else if (flags & kSuspendRequest) {
    LOG(INFO) << "Suspend fallback: " << inst->Opcode(inst_data);
  } else if (flags & kEmptyCheckpointRequest) {
    LOG(INFO) << "Empty checkpoint fallback: " << inst->Opcode(inst_data);
  }
}

extern "C" size_t MterpSuspendCheck(Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  self->AllowThreadSuspension();
  return MterpShouldSwitchInterpreters();
}

// Execute a single field access instruction (get/put, static/instance).
// The template arguments reduce this to a fairly small amount of code.
// It requires the target object and field to be already resolved.
template<typename PrimType, FindFieldType kAccessType>
ALWAYS_INLINE void MterpFieldAccess(Instruction* inst,
                                    uint16_t inst_data,
                                    ShadowFrame* shadow_frame,
                                    ObjPtr<mirror::Object> obj,
                                    MemberOffset offset,
                                    bool is_volatile)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  static_assert(std::is_integral<PrimType>::value, "Unexpected primitive type");
  constexpr bool kIsStatic = (kAccessType & FindFieldFlags::StaticBit) != 0;
  constexpr bool kIsPrimitive = (kAccessType & FindFieldFlags::PrimitiveBit) != 0;
  constexpr bool kIsRead = (kAccessType & FindFieldFlags::ReadBit) != 0;

  uint16_t vRegA = kIsStatic ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  if (kIsPrimitive) {
    if (kIsRead) {
      PrimType value = UNLIKELY(is_volatile)
          ? obj->GetFieldPrimitive<PrimType, /*kIsVolatile*/ true>(offset)
          : obj->GetFieldPrimitive<PrimType, /*kIsVolatile*/ false>(offset);
      if (sizeof(PrimType) == sizeof(uint64_t)) {
        shadow_frame->SetVRegLong(vRegA, value);  // Set two consecutive registers.
      } else {
        shadow_frame->SetVReg(vRegA, static_cast<int32_t>(value));  // Sign/zero extend.
      }
    } else {  // Write.
      uint64_t value = (sizeof(PrimType) == sizeof(uint64_t))
          ? shadow_frame->GetVRegLong(vRegA)
          : shadow_frame->GetVReg(vRegA);
      if (UNLIKELY(is_volatile)) {
        obj->SetFieldPrimitive<PrimType, /*kIsVolatile*/ true>(offset, value);
      } else {
        obj->SetFieldPrimitive<PrimType, /*kIsVolatile*/ false>(offset, value);
      }
    }
  } else {  // Object.
    if (kIsRead) {
      ObjPtr<mirror::Object> value = UNLIKELY(is_volatile)
          ? obj->GetFieldObjectVolatile<mirror::Object>(offset)
          : obj->GetFieldObject<mirror::Object>(offset);
      shadow_frame->SetVRegReference(vRegA, value);
    } else {  // Write.
      ObjPtr<mirror::Object> value = shadow_frame->GetVRegReference(vRegA);
      if (UNLIKELY(is_volatile)) {
        obj->SetFieldObjectVolatile</*kTransactionActive*/ false>(offset, value);
      } else {
        obj->SetFieldObject</*kTransactionActive*/ false>(offset, value);
      }
    }
  }
}
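
// As a usage sketch (enumerator names taken from FindFieldType): an iget-byte
// on a resolved, non-volatile instance field lands in
// MterpFieldAccess<int8_t, InstancePrimitiveRead>, which loads the byte at
// `offset` from `obj` and sign-extends it into vA, while the matching
// iput-byte instantiation, MterpFieldAccess<int8_t, InstancePrimitiveWrite>,
// stores the low byte of vA back to the same offset.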

template<typename PrimType, FindFieldType kAccessType>
NO_INLINE bool MterpFieldAccessSlow(Instruction* inst,
                                    uint16_t inst_data,
                                    ShadowFrame* shadow_frame,
                                    Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr bool kIsStatic = (kAccessType & FindFieldFlags::StaticBit) != 0;
  constexpr bool kIsRead = (kAccessType & FindFieldFlags::ReadBit) != 0;

  // Update the dex pc in shadow frame, just in case anything throws.
  shadow_frame->SetDexPCPtr(reinterpret_cast<uint16_t*>(inst));
  ArtMethod* referrer = shadow_frame->GetMethod();
  uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* field = FindFieldFromCode<kAccessType, /* access_checks */ false>(
      field_idx, referrer, self, sizeof(PrimType));
  if (UNLIKELY(field == nullptr)) {
    DCHECK(self->IsExceptionPending());
    return false;
  }
  ObjPtr<mirror::Object> obj = kIsStatic
      ? field->GetDeclaringClass().Ptr()
      : shadow_frame->GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    ThrowNullPointerExceptionForFieldAccess(field, kIsRead);
    return false;
  }
  MterpFieldAccess<PrimType, kAccessType>(
      inst, inst_data, shadow_frame, obj, field->GetOffset(), field->IsVolatile());
  return true;
}

template<typename PrimType, FindFieldType kAccessType>
ALWAYS_INLINE bool MterpFieldAccessFast(Instruction* inst,
                                        uint16_t inst_data,
                                        ShadowFrame* shadow_frame,
                                        Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr bool kIsStatic = (kAccessType & FindFieldFlags::StaticBit) != 0;

  // This effectively inlines the fast path from ArtMethod::GetDexCache.
  // It avoids a non-inlined call, which in turn allows elimination of the prologue and epilogue.
  ArtMethod* referrer = shadow_frame->GetMethod();
  if (LIKELY(!referrer->IsObsolete())) {
    // Avoid read barriers, since we need only the pointer to the native (non-movable)
    // DexCache field array, which we can get even from from-space objects.
    ObjPtr<mirror::Class> klass = referrer->GetDeclaringClass<kWithoutReadBarrier>();
    mirror::DexCache* dex_cache = klass->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>();

    // Try to find the desired field in DexCache.
    uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c();
    ArtField* field = dex_cache->GetResolvedField(field_idx, kRuntimePointerSize);
    if (LIKELY(field != nullptr)) {
      bool initialized = !kIsStatic || field->GetDeclaringClass()->IsInitialized();
      if (LIKELY(initialized)) {
        DCHECK_EQ(field, (FindFieldFromCode<kAccessType, /* access_checks */ false>(
            field_idx, referrer, self, sizeof(PrimType))));
        ObjPtr<mirror::Object> obj = kIsStatic
            ? field->GetDeclaringClass().Ptr()
            : shadow_frame->GetVRegReference(inst->VRegB_22c(inst_data));
        if (LIKELY(kIsStatic || obj != nullptr)) {
          MterpFieldAccess<PrimType, kAccessType>(
              inst, inst_data, shadow_frame, obj, field->GetOffset(), field->IsVolatile());
          return true;
        }
      }
    }
  }

  // Slow path. Last, and with identical arguments, so that it becomes a single-instruction
  // tail call.
  return MterpFieldAccessSlow<PrimType, kAccessType>(inst, inst_data, shadow_frame, self);
}

#define MTERP_FIELD_ACCESSOR(Name, PrimType, AccessType)                                    \
extern "C" bool Name(Instruction* inst, uint16_t inst_data, ShadowFrame* sf, Thread* self)  \
    REQUIRES_SHARED(Locks::mutator_lock_) {                                                 \
  return MterpFieldAccessFast<PrimType, AccessType>(inst, inst_data, sf, self);             \
}

#define MTERP_FIELD_ACCESSORS_FOR_TYPE(Suffix, PrimType, Kind)                              \
  MTERP_FIELD_ACCESSOR(MterpIGet##Suffix, PrimType, Instance##Kind##Read)                   \
  MTERP_FIELD_ACCESSOR(MterpIPut##Suffix, PrimType, Instance##Kind##Write)                  \
  MTERP_FIELD_ACCESSOR(MterpSGet##Suffix, PrimType, Static##Kind##Read)                     \
  MTERP_FIELD_ACCESSOR(MterpSPut##Suffix, PrimType, Static##Kind##Write)

MTERP_FIELD_ACCESSORS_FOR_TYPE(I8, int8_t, Primitive)
MTERP_FIELD_ACCESSORS_FOR_TYPE(U8, uint8_t, Primitive)
MTERP_FIELD_ACCESSORS_FOR_TYPE(I16, int16_t, Primitive)
MTERP_FIELD_ACCESSORS_FOR_TYPE(U16, uint16_t, Primitive)
MTERP_FIELD_ACCESSORS_FOR_TYPE(U32, uint32_t, Primitive)
MTERP_FIELD_ACCESSORS_FOR_TYPE(U64, uint64_t, Primitive)
MTERP_FIELD_ACCESSORS_FOR_TYPE(Obj, uint32_t, Object)

// Check that the primitive type for the Obj variant above is correct.
// It really must be a primitive type for the templates to compile.
// In the case of objects, it is only used to get the field size.
static_assert(kHeapReferenceSize == sizeof(uint32_t), "Unexpected kHeapReferenceSize");

#undef MTERP_FIELD_ACCESSORS_FOR_TYPE
#undef MTERP_FIELD_ACCESSOR
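
// For reference, a single expansion of the accessor macro above, e.g.
// MTERP_FIELD_ACCESSOR(MterpIGetU32, uint32_t, InstancePrimitiveRead), produces:
//
//   extern "C" bool MterpIGetU32(Instruction* inst, uint16_t inst_data,
//                                ShadowFrame* sf, Thread* self)
//       REQUIRES_SHARED(Locks::mutator_lock_) {
//     return MterpFieldAccessFast<uint32_t, InstancePrimitiveRead>(inst, inst_data, sf, self);
//   }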

extern "C" mirror::Object* artAGetObjectFromMterp(mirror::Object* arr,
                                                  int32_t index)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (UNLIKELY(arr == nullptr)) {
    ThrowNullPointerExceptionFromInterpreter();
    return nullptr;
  }
  mirror::ObjectArray<mirror::Object>* array = arr->AsObjectArray<mirror::Object>();
  if (LIKELY(array->CheckIsValidIndex(index))) {
    return array->GetWithoutChecks(index);
  } else {
    return nullptr;
  }
}

extern "C" mirror::Object* artIGetObjectFromMterp(mirror::Object* obj,
                                                  uint32_t field_offset)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (UNLIKELY(obj == nullptr)) {
    ThrowNullPointerExceptionFromInterpreter();
    return nullptr;
  }
  return obj->GetFieldObject<mirror::Object>(MemberOffset(field_offset));
}

/*
 * Create a hotness_countdown based on the current method hotness_count and profiling
 * mode.  In short, determine how many hotness events we hit before reporting back
 * to the full instrumentation via MterpAddHotnessBatch.  Called once on entry to the method,
 * and regenerated following batch updates.
 */
extern "C" ssize_t MterpSetUpHotnessCountdown(ArtMethod* method,
                                              ShadowFrame* shadow_frame,
                                              Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint16_t hotness_count = method->GetCounter();
  int32_t countdown_value = jit::kJitHotnessDisabled;
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    int32_t warm_threshold = jit->WarmMethodThreshold();
    int32_t hot_threshold = jit->HotMethodThreshold();
    int32_t osr_threshold = jit->OSRMethodThreshold();
    if (hotness_count < warm_threshold) {
      countdown_value = warm_threshold - hotness_count;
    } else if (hotness_count < hot_threshold) {
      countdown_value = hot_threshold - hotness_count;
    } else if (hotness_count < osr_threshold) {
      countdown_value = osr_threshold - hotness_count;
    } else {
      countdown_value = jit::kJitCheckForOSR;
    }
    if (jit::Jit::ShouldUsePriorityThreadWeight(self)) {
      int32_t priority_thread_weight = jit->PriorityThreadWeight();
      countdown_value = std::min(countdown_value, countdown_value / priority_thread_weight);
    }
  }
  /*
   * The actual hotness threshold may exceed the range of our int16_t countdown value.  This is
   * not a problem, though.  We can just break it down into smaller chunks.
   */
  countdown_value = std::min(countdown_value,
                             static_cast<int32_t>(std::numeric_limits<int16_t>::max()));
  shadow_frame->SetCachedHotnessCountdown(countdown_value);
  shadow_frame->SetHotnessCountdown(countdown_value);
  return countdown_value;
}
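
// A worked example with hypothetical thresholds: if WarmMethodThreshold() is 5000,
// HotMethodThreshold() is 10000, and the method's counter currently reads 7500, the
// countdown becomes 10000 - 7500 = 2500.  With a priority thread weight of 5 it
// shrinks to min(2500, 2500 / 5) = 500, which already fits in int16_t, so no further
// clamping applies and both shadow-frame countdown fields are set to 500.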

/*
 * Report a batch of hotness events to the instrumentation and then return the new
 * countdown value indicating when we should next report.
 */
extern "C" ssize_t MterpAddHotnessBatch(ArtMethod* method,
                                        ShadowFrame* shadow_frame,
                                        Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  jit::Jit* jit = Runtime::Current()->GetJit();
  if (jit != nullptr) {
    int16_t count = shadow_frame->GetCachedHotnessCountdown() - shadow_frame->GetHotnessCountdown();
    jit->AddSamples(self, method, count, /*with_backedges*/ true);
  }
  return MterpSetUpHotnessCountdown(method, shadow_frame, self);
}

extern "C" size_t MterpMaybeDoOnStackReplacement(Thread* self,
                                                 ShadowFrame* shadow_frame,
                                                 int32_t offset)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  int16_t osr_countdown = shadow_frame->GetCachedHotnessCountdown() - 1;
  bool did_osr = false;
  /*
   * To reduce the cost of polling the compiler to determine whether the requested OSR
   * compilation has completed, only check every Nth time.  NOTE: the "osr_countdown <= 0"
   * condition is satisfied either by the decrement above or the initial setting of
   * the cached countdown field to kJitCheckForOSR, which elsewhere is asserted to be -1.
   */
  if (osr_countdown <= 0) {
    ArtMethod* method = shadow_frame->GetMethod();
    JValue* result = shadow_frame->GetResultRegister();
    uint32_t dex_pc = shadow_frame->GetDexPC();
    jit::Jit* jit = Runtime::Current()->GetJit();
    osr_countdown = jit::Jit::kJitRecheckOSRThreshold;
    if (offset <= 0) {
      // Keep updating hotness in case a compilation request was dropped.  Eventually it will retry.
      jit->AddSamples(self, method, osr_countdown, /*with_backedges*/ true);
    }
    did_osr = jit::Jit::MaybeDoOnStackReplacement(self, method, dex_pc, offset, result);
  }
  shadow_frame->SetCachedHotnessCountdown(osr_countdown);
  return did_osr;
}

}  // namespace interpreter
}  // namespace art