blob: 01a6213a26a470fa49e909c3b7127ef03015b366 [file] [log] [blame]
buzbee1452bee2015-03-06 14:43:04 -08001/*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Mterp entry point and support functions.
19 */
buzbee1452bee2015-03-06 14:43:04 -080020#include "mterp.h"
David Sehrc431b9d2018-03-02 12:01:51 -080021
22#include "base/quasi_atomic.h"
Bill Buzbeefd522f92016-02-11 22:37:42 +000023#include "debugger.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070024#include "entrypoints/entrypoint_utils-inl.h"
25#include "interpreter/interpreter_common.h"
26#include "interpreter/interpreter_intrinsics.h"
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +010027#include "interpreter/shadow_frame-inl.h"
Andreas Gampefd63bbf2018-10-29 12:55:35 -070028#include "mirror/string-alloc-inl.h"
buzbee1452bee2015-03-06 14:43:04 -080029
30namespace art {
31namespace interpreter {
32/*
33 * Verify some constants used by the mterp interpreter.
34 */
35void CheckMterpAsmConstants() {
36 /*
37 * If we're using computed goto instruction transitions, make sure
David Srbeckyd88f5f72018-10-16 14:22:33 +010038 * none of the handlers overflows the byte limit. This won't tell
buzbee1452bee2015-03-06 14:43:04 -080039 * which one did, but if any one is too big the total size will
40 * overflow.
41 */
David Srbeckyd88f5f72018-10-16 14:22:33 +010042 const int width = kMterpHandlerSize;
buzbee1452bee2015-03-06 14:43:04 -080043 int interp_size = (uintptr_t) artMterpAsmInstructionEnd -
44 (uintptr_t) artMterpAsmInstructionStart;
45 if ((interp_size == 0) || (interp_size != (art::kNumPackedOpcodes * width))) {
Andreas Gampe3fec9ac2016-09-13 10:47:28 -070046 LOG(FATAL) << "ERROR: unexpected asm interp size " << interp_size
47 << "(did an instruction handler exceed " << width << " bytes?)";
buzbee1452bee2015-03-06 14:43:04 -080048 }
49}
50
// Store the mterp handler-table base pointer on the thread so the assembly
// interpreter can dispatch from thread-local state.
void InitMterpTls(Thread* self) {
  self->SetMterpCurrentIBase(artMterpAsmInstructionStart);
}
54
55/*
56 * Find the matching case. Returns the offset to the handler instructions.
57 *
58 * Returns 3 if we don't find a match (it's the size of the sparse-switch
59 * instruction).
60 */
Andreas Gampe67409972016-07-19 22:34:53 -070061extern "C" ssize_t MterpDoSparseSwitch(const uint16_t* switchData, int32_t testVal) {
buzbee1452bee2015-03-06 14:43:04 -080062 const int kInstrLen = 3;
63 uint16_t size;
64 const int32_t* keys;
65 const int32_t* entries;
66
67 /*
68 * Sparse switch data format:
69 * ushort ident = 0x0200 magic value
70 * ushort size number of entries in the table; > 0
71 * int keys[size] keys, sorted low-to-high; 32-bit aligned
72 * int targets[size] branch targets, relative to switch opcode
73 *
74 * Total size is (2+size*4) 16-bit code units.
75 */
76
77 uint16_t signature = *switchData++;
78 DCHECK_EQ(signature, static_cast<uint16_t>(art::Instruction::kSparseSwitchSignature));
79
80 size = *switchData++;
81
82 /* The keys are guaranteed to be aligned on a 32-bit boundary;
83 * we can treat them as a native int array.
84 */
85 keys = reinterpret_cast<const int32_t*>(switchData);
86
87 /* The entries are guaranteed to be aligned on a 32-bit boundary;
88 * we can treat them as a native int array.
89 */
90 entries = keys + size;
91
92 /*
93 * Binary-search through the array of keys, which are guaranteed to
94 * be sorted low-to-high.
95 */
96 int lo = 0;
97 int hi = size - 1;
98 while (lo <= hi) {
99 int mid = (lo + hi) >> 1;
100
101 int32_t foundVal = keys[mid];
102 if (testVal < foundVal) {
103 hi = mid - 1;
104 } else if (testVal > foundVal) {
105 lo = mid + 1;
106 } else {
107 return entries[mid];
108 }
109 }
110 return kInstrLen;
111}
112
Andreas Gampe67409972016-07-19 22:34:53 -0700113extern "C" ssize_t MterpDoPackedSwitch(const uint16_t* switchData, int32_t testVal) {
buzbee1452bee2015-03-06 14:43:04 -0800114 const int kInstrLen = 3;
115
116 /*
117 * Packed switch data format:
118 * ushort ident = 0x0100 magic value
119 * ushort size number of entries in the table
120 * int first_key first (and lowest) switch case value
121 * int targets[size] branch targets, relative to switch opcode
122 *
123 * Total size is (4+size*2) 16-bit code units.
124 */
125 uint16_t signature = *switchData++;
126 DCHECK_EQ(signature, static_cast<uint16_t>(art::Instruction::kPackedSwitchSignature));
127
128 uint16_t size = *switchData++;
129
130 int32_t firstKey = *switchData++;
131 firstKey |= (*switchData++) << 16;
132
133 int index = testVal - firstKey;
134 if (index < 0 || index >= size) {
135 return kInstrLen;
136 }
137
138 /*
139 * The entries are guaranteed to be aligned on a 32-bit boundary;
140 * we can treat them as a native int array.
141 */
142 const int32_t* entries = reinterpret_cast<const int32_t*>(switchData);
143 return entries[index];
144}
145
// Returns true only when every precondition for running the fast assembly
// (mterp) interpreter holds; otherwise the C++ switch interpreter must be
// used. Each clause below disables mterp for a specific runtime state.
bool CanUseMterp()
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Runtime* const runtime = Runtime::Current();
  return
      runtime->IsStarted() &&
      !runtime->IsAotCompiler() &&
      // Instrumentation (debugging/tracing) needs the switch interpreter's hooks.
      !runtime->GetInstrumentation()->IsActive() &&
      // In simulator mode, mterp and its fast path are avoided to ensure every
      // called method can go through ArtMethod::Invoke().
      !Runtime::SimulatorMode() &&
      // mterp only knows how to deal with the normal exits. It cannot handle any of the
      // non-standard force-returns.
      !runtime->AreNonStandardExitsEnabled() &&
      // An async exception has been thrown. We need to go to the switch interpreter. MTerp doesn't
      // know how to deal with these so we could end up never dealing with it if we are in an
      // infinite loop.
      !runtime->AreAsyncExceptionsThrown() &&
      // NOTE(review): presumably JIT-at-first-use requires compilation instead
      // of mterp execution — confirm against the JIT code.
      (runtime->GetJit() == nullptr || !runtime->GetJit()->JitAtFirstUse());
}
165
buzbee1452bee2015-03-06 14:43:04 -0800166
Andreas Gampe67409972016-07-19 22:34:53 -0700167extern "C" size_t MterpInvokeVirtual(Thread* self,
168 ShadowFrame* shadow_frame,
169 uint16_t* dex_pc_ptr,
170 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700171 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800172 JValue* result_register = shadow_frame->GetResultRegister();
173 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100174 return DoInvoke<kVirtual, /*is_range=*/ false, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700175 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800176}
177
Andreas Gampe67409972016-07-19 22:34:53 -0700178extern "C" size_t MterpInvokeSuper(Thread* self,
179 ShadowFrame* shadow_frame,
180 uint16_t* dex_pc_ptr,
181 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700182 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800183 JValue* result_register = shadow_frame->GetResultRegister();
184 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100185 return DoInvoke<kSuper, /*is_range=*/ false, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700186 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800187}
188
Andreas Gampe67409972016-07-19 22:34:53 -0700189extern "C" size_t MterpInvokeInterface(Thread* self,
190 ShadowFrame* shadow_frame,
191 uint16_t* dex_pc_ptr,
192 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700193 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800194 JValue* result_register = shadow_frame->GetResultRegister();
195 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100196 return DoInvoke<kInterface, /*is_range=*/ false, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700197 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800198}
199
Andreas Gampe67409972016-07-19 22:34:53 -0700200extern "C" size_t MterpInvokeDirect(Thread* self,
201 ShadowFrame* shadow_frame,
202 uint16_t* dex_pc_ptr,
203 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700204 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800205 JValue* result_register = shadow_frame->GetResultRegister();
206 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100207 return DoInvoke<kDirect, /*is_range=*/ false, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700208 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800209}
210
Andreas Gampe67409972016-07-19 22:34:53 -0700211extern "C" size_t MterpInvokeStatic(Thread* self,
212 ShadowFrame* shadow_frame,
213 uint16_t* dex_pc_ptr,
214 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700215 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800216 JValue* result_register = shadow_frame->GetResultRegister();
217 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100218 return DoInvoke<kStatic, /*is_range=*/ false, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700219 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800220}
221
Orion Hodsone7732be2017-10-11 14:35:20 +0100222extern "C" size_t MterpInvokeCustom(Thread* self,
223 ShadowFrame* shadow_frame,
224 uint16_t* dex_pc_ptr,
225 uint16_t inst_data)
226 REQUIRES_SHARED(Locks::mutator_lock_) {
227 JValue* result_register = shadow_frame->GetResultRegister();
228 const Instruction* inst = Instruction::At(dex_pc_ptr);
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700229 return DoInvokeCustom</* is_range= */ false>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700230 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100231}
232
233extern "C" size_t MterpInvokePolymorphic(Thread* self,
234 ShadowFrame* shadow_frame,
235 uint16_t* dex_pc_ptr,
236 uint16_t inst_data)
237 REQUIRES_SHARED(Locks::mutator_lock_) {
238 JValue* result_register = shadow_frame->GetResultRegister();
239 const Instruction* inst = Instruction::At(dex_pc_ptr);
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700240 return DoInvokePolymorphic</* is_range= */ false>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700241 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100242}
243
Andreas Gampe67409972016-07-19 22:34:53 -0700244extern "C" size_t MterpInvokeVirtualRange(Thread* self,
245 ShadowFrame* shadow_frame,
246 uint16_t* dex_pc_ptr,
247 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700248 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800249 JValue* result_register = shadow_frame->GetResultRegister();
250 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100251 return DoInvoke<kVirtual, /*is_range=*/ true, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700252 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800253}
254
Andreas Gampe67409972016-07-19 22:34:53 -0700255extern "C" size_t MterpInvokeSuperRange(Thread* self,
256 ShadowFrame* shadow_frame,
257 uint16_t* dex_pc_ptr,
258 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700259 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800260 JValue* result_register = shadow_frame->GetResultRegister();
261 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100262 return DoInvoke<kSuper, /*is_range=*/ true, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700263 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800264}
265
Andreas Gampe67409972016-07-19 22:34:53 -0700266extern "C" size_t MterpInvokeInterfaceRange(Thread* self,
267 ShadowFrame* shadow_frame,
268 uint16_t* dex_pc_ptr,
269 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700270 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800271 JValue* result_register = shadow_frame->GetResultRegister();
272 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100273 return DoInvoke<kInterface, /*is_range=*/ true, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700274 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800275}
276
Andreas Gampe67409972016-07-19 22:34:53 -0700277extern "C" size_t MterpInvokeDirectRange(Thread* self,
278 ShadowFrame* shadow_frame,
279 uint16_t* dex_pc_ptr,
280 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700281 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800282 JValue* result_register = shadow_frame->GetResultRegister();
283 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100284 return DoInvoke<kDirect, /*is_range=*/ true, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700285 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800286}
287
Andreas Gampe67409972016-07-19 22:34:53 -0700288extern "C" size_t MterpInvokeStaticRange(Thread* self,
289 ShadowFrame* shadow_frame,
290 uint16_t* dex_pc_ptr,
291 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700292 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800293 JValue* result_register = shadow_frame->GetResultRegister();
294 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky1f5ab4e2018-10-15 11:46:46 +0100295 return DoInvoke<kStatic, /*is_range=*/ true, /*do_access_check=*/ false, /*is_mterp=*/ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700296 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800297}
298
Orion Hodsone7732be2017-10-11 14:35:20 +0100299extern "C" size_t MterpInvokeCustomRange(Thread* self,
300 ShadowFrame* shadow_frame,
301 uint16_t* dex_pc_ptr,
302 uint16_t inst_data)
303 REQUIRES_SHARED(Locks::mutator_lock_) {
304 JValue* result_register = shadow_frame->GetResultRegister();
305 const Instruction* inst = Instruction::At(dex_pc_ptr);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700306 return DoInvokeCustom</*is_range=*/ true>(
307 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100308}
309
310extern "C" size_t MterpInvokePolymorphicRange(Thread* self,
311 ShadowFrame* shadow_frame,
312 uint16_t* dex_pc_ptr,
313 uint16_t inst_data)
314 REQUIRES_SHARED(Locks::mutator_lock_) {
315 JValue* result_register = shadow_frame->GetResultRegister();
316 const Instruction* inst = Instruction::At(dex_pc_ptr);
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700317 return DoInvokePolymorphic</* is_range= */ true>(
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700318 self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100319}
320
Andreas Gampe67409972016-07-19 22:34:53 -0700321extern "C" size_t MterpInvokeVirtualQuick(Thread* self,
322 ShadowFrame* shadow_frame,
323 uint16_t* dex_pc_ptr,
324 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700325 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800326 JValue* result_register = shadow_frame->GetResultRegister();
327 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky08cb7382018-10-30 09:27:59 +0000328 return DoInvoke<kVirtual, /*is_range=*/ false, /*do_access_check=*/ false, /*is_mterp=*/ true,
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700329 /*is_quick=*/ true>(self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800330}
331
Andreas Gampe67409972016-07-19 22:34:53 -0700332extern "C" size_t MterpInvokeVirtualQuickRange(Thread* self,
333 ShadowFrame* shadow_frame,
334 uint16_t* dex_pc_ptr,
335 uint16_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700336 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800337 JValue* result_register = shadow_frame->GetResultRegister();
338 const Instruction* inst = Instruction::At(dex_pc_ptr);
David Srbecky08cb7382018-10-30 09:27:59 +0000339 return DoInvoke<kVirtual, /*is_range=*/ true, /*do_access_check=*/ false, /*is_mterp=*/ true,
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700340 /*is_quick=*/ true>(self, *shadow_frame, inst, inst_data, result_register) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800341}
342
// Memory fence executed on constructor return; delegates to
// QuasiAtomic::ThreadFenceForConstructor().
extern "C" void MterpThreadFenceForConstructor() {
  QuasiAtomic::ThreadFenceForConstructor();
}
346
Andreas Gampe67409972016-07-19 22:34:53 -0700347extern "C" size_t MterpConstString(uint32_t index,
348 uint32_t tgt_vreg,
349 ShadowFrame* shadow_frame,
350 Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700351 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe8a0128a2016-11-28 07:38:35 -0800352 ObjPtr<mirror::String> s = ResolveString(self, *shadow_frame, dex::StringIndex(index));
buzbee1452bee2015-03-06 14:43:04 -0800353 if (UNLIKELY(s == nullptr)) {
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700354 return 1u;
buzbee1452bee2015-03-06 14:43:04 -0800355 }
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +0100356 shadow_frame->SetVRegReference(tgt_vreg, s);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700357 return 0u;
buzbee1452bee2015-03-06 14:43:04 -0800358}
359
Andreas Gampe67409972016-07-19 22:34:53 -0700360extern "C" size_t MterpConstClass(uint32_t index,
361 uint32_t tgt_vreg,
362 ShadowFrame* shadow_frame,
363 Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700364 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Marko28e012a2017-12-07 11:22:59 +0000365 ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(index),
366 shadow_frame->GetMethod(),
367 self,
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700368 /* can_run_clinit= */ false,
369 /* verify_access= */ false);
buzbee1452bee2015-03-06 14:43:04 -0800370 if (UNLIKELY(c == nullptr)) {
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700371 return 1u;
buzbee1452bee2015-03-06 14:43:04 -0800372 }
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +0100373 shadow_frame->SetVRegReference(tgt_vreg, c);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700374 return 0u;
buzbee1452bee2015-03-06 14:43:04 -0800375}
376
Orion Hodsone7732be2017-10-11 14:35:20 +0100377extern "C" size_t MterpConstMethodHandle(uint32_t index,
378 uint32_t tgt_vreg,
379 ShadowFrame* shadow_frame,
380 Thread* self)
381 REQUIRES_SHARED(Locks::mutator_lock_) {
382 ObjPtr<mirror::MethodHandle> mh = ResolveMethodHandle(self, index, shadow_frame->GetMethod());
383 if (UNLIKELY(mh == nullptr)) {
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700384 return 1u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100385 }
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +0100386 shadow_frame->SetVRegReference(tgt_vreg, mh);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700387 return 0u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100388}
389
390extern "C" size_t MterpConstMethodType(uint32_t index,
391 uint32_t tgt_vreg,
392 ShadowFrame* shadow_frame,
393 Thread* self)
394 REQUIRES_SHARED(Locks::mutator_lock_) {
Orion Hodson06d10a72018-05-14 08:53:38 +0100395 ObjPtr<mirror::MethodType> mt =
396 ResolveMethodType(self, dex::ProtoIndex(index), shadow_frame->GetMethod());
Orion Hodsone7732be2017-10-11 14:35:20 +0100397 if (UNLIKELY(mt == nullptr)) {
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700398 return 1u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100399 }
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +0100400 shadow_frame->SetVRegReference(tgt_vreg, mt);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700401 return 0u;
Orion Hodsone7732be2017-10-11 14:35:20 +0100402}
403
// check-cast helper. Returns 1 on failure (class unresolvable, or the cast
// failed and a ClassCastException was thrown), 0 on success.
extern "C" size_t MterpCheckCast(uint32_t index,
                                 StackReference<mirror::Object>* vreg_addr,
                                 art::ArtMethod* method,
                                 Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(index),
                                                   method,
                                                   self,
                                                   /* can_run_clinit= */ false,
                                                   /* verify_access= */ false);
  if (UNLIKELY(c == nullptr)) {
    return 1u;
  }
  // Must load obj from vreg following ResolveVerifyAndClinit due to moving gc.
  ObjPtr<mirror::Object> obj = vreg_addr->AsMirrorPtr();
  // null always passes check-cast; a non-null object must be an instance of c.
  if (UNLIKELY(obj != nullptr && !obj->InstanceOf(c))) {
    ThrowClassCastException(c, obj->GetClass());
    return 1u;
  }
  return 0u;
}
425
// instance-of helper. Returns 1 when the object is a non-null instance of the
// resolved class, 0 otherwise (including when resolution fails).
extern "C" size_t MterpInstanceOf(uint32_t index,
                                  StackReference<mirror::Object>* vreg_addr,
                                  art::ArtMethod* method,
                                  Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(index),
                                                   method,
                                                   self,
                                                   /* can_run_clinit= */ false,
                                                   /* verify_access= */ false);
  if (UNLIKELY(c == nullptr)) {
    return 0u;  // Caller will check for pending exception. Return value unimportant.
  }
  // Must load obj from vreg following ResolveVerifyAndClinit due to moving gc.
  ObjPtr<mirror::Object> obj = vreg_addr->AsMirrorPtr();
  return (obj != nullptr) && obj->InstanceOf(c) ? 1u : 0u;
}
443
// fill-array-data helper: delegate to the shared FillArrayData routine.
// Returns 1 on success, 0 on failure.
extern "C" size_t MterpFillArrayData(mirror::Object* obj,
                                     const Instruction::ArrayDataPayload* payload)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return FillArrayData(obj, payload) ? 1u : 0u;
}
449
// new-instance helper: resolve the class from vB of the 21c instruction and
// allocate an object, storing it into vA. Returns 1 on success, 0 on failure.
extern "C" size_t MterpNewInstance(ShadowFrame* shadow_frame, Thread* self, uint32_t inst_data)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
  ObjPtr<mirror::Object> obj = nullptr;
  ObjPtr<mirror::Class> c = ResolveVerifyAndClinit(dex::TypeIndex(inst->VRegB_21c()),
                                                   shadow_frame->GetMethod(),
                                                   self,
                                                   /* can_run_clinit= */ false,
                                                   /* verify_access= */ false);
  if (LIKELY(c != nullptr)) {
    if (UNLIKELY(c->IsStringClass())) {
      // String is special-cased: allocate an empty string rather than a plain object.
      gc::AllocatorType allocator_type = Runtime::Current()->GetHeap()->GetCurrentAllocator();
      obj = mirror::String::AllocEmptyString(self, allocator_type);
    } else {
      obj = AllocObjectFromCode(c, self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
    }
  }
  if (UNLIKELY(obj == nullptr)) {
    return 0u;
  }
  obj->GetClass()->AssertInitializedOrInitializingInThread(self);
  shadow_frame->SetVRegReference(inst->VRegA_21c(inst_data), obj);
  return 1u;
}
474
Andreas Gampe67409972016-07-19 22:34:53 -0700475extern "C" size_t MterpIputObjectQuick(ShadowFrame* shadow_frame,
476 uint16_t* dex_pc_ptr,
477 uint32_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700478 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800479 const Instruction* inst = Instruction::At(dex_pc_ptr);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700480 return DoIPutQuick<Primitive::kPrimNot, false>(*shadow_frame, inst, inst_data) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800481}
482
Andreas Gampe67409972016-07-19 22:34:53 -0700483extern "C" size_t MterpAputObject(ShadowFrame* shadow_frame,
484 uint16_t* dex_pc_ptr,
485 uint32_t inst_data)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700486 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800487 const Instruction* inst = Instruction::At(dex_pc_ptr);
Vladimir Marko4bb2af52019-03-22 11:09:19 +0000488 ObjPtr<mirror::Object> a = shadow_frame->GetVRegReference(inst->VRegB_23x());
buzbee1452bee2015-03-06 14:43:04 -0800489 if (UNLIKELY(a == nullptr)) {
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700490 return 0u;
buzbee1452bee2015-03-06 14:43:04 -0800491 }
492 int32_t index = shadow_frame->GetVReg(inst->VRegC_23x());
Vladimir Marko4bb2af52019-03-22 11:09:19 +0000493 ObjPtr<mirror::Object> val = shadow_frame->GetVRegReference(inst->VRegA_23x(inst_data));
494 ObjPtr<mirror::ObjectArray<mirror::Object>> array = a->AsObjectArray<mirror::Object>();
buzbee1452bee2015-03-06 14:43:04 -0800495 if (array->CheckIsValidIndex(index) && array->CheckAssignable(val)) {
496 array->SetWithoutChecks<false>(index, val);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700497 return 1u;
buzbee1452bee2015-03-06 14:43:04 -0800498 }
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700499 return 0u;
buzbee1452bee2015-03-06 14:43:04 -0800500}
501
Andreas Gampe67409972016-07-19 22:34:53 -0700502extern "C" size_t MterpFilledNewArray(ShadowFrame* shadow_frame,
503 uint16_t* dex_pc_ptr,
504 Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700505 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800506 const Instruction* inst = Instruction::At(dex_pc_ptr);
507 return DoFilledNewArray<false, false, false>(inst, *shadow_frame, self,
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700508 shadow_frame->GetResultRegister()) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800509}
510
Andreas Gampe67409972016-07-19 22:34:53 -0700511extern "C" size_t MterpFilledNewArrayRange(ShadowFrame* shadow_frame,
512 uint16_t* dex_pc_ptr,
513 Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700514 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800515 const Instruction* inst = Instruction::At(dex_pc_ptr);
516 return DoFilledNewArray<true, false, false>(inst, *shadow_frame, self,
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700517 shadow_frame->GetResultRegister()) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800518}
519
// new-array helper: allocate an array of the type in vC with length taken
// from vB, storing it into vA. Returns 1 on success, 0 on failure.
extern "C" size_t MterpNewArray(ShadowFrame* shadow_frame,
                                uint16_t* dex_pc_ptr,
                                uint32_t inst_data, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  int32_t length = shadow_frame->GetVReg(inst->VRegB_22c(inst_data));
  ObjPtr<mirror::Object> obj = AllocArrayFromCode</*kAccessCheck=*/ false>(
      dex::TypeIndex(inst->VRegC_22c()), length, shadow_frame->GetMethod(), self,
      Runtime::Current()->GetHeap()->GetCurrentAllocator());
  if (UNLIKELY(obj == nullptr)) {
    return 0u;
  }
  shadow_frame->SetVRegReference(inst->VRegA_22c(inst_data), obj);
  return 1u;
}
535
Andreas Gampe67409972016-07-19 22:34:53 -0700536extern "C" size_t MterpHandleException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700537 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800538 DCHECK(self->IsExceptionPending());
539 const instrumentation::Instrumentation* const instrumentation =
540 Runtime::Current()->GetInstrumentation();
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700541 return MoveToExceptionHandler(self, *shadow_frame, instrumentation) ? 1u : 0u;
buzbee1452bee2015-03-06 14:43:04 -0800542}
543
// Holder for the runtime-debug flag that enables extra (slow) checks in
// MterpCheckBefore, e.g. shadow-frame vreg consistency validation.
struct MterpCheckHelper {
  DECLARE_RUNTIME_DEBUG_FLAG(kSlowMode);
};
DEFINE_RUNTIME_DEBUG_FLAG(MterpCheckHelper, kSlowMode);
548
// Debug hook run before each bytecode in the checked mterp configuration:
// validates interpreter-selection and exception invariants, optionally traces
// execution, and poisons the exported dex pc to catch stale uses.
extern "C" void MterpCheckBefore(Thread* self, ShadowFrame* shadow_frame, uint16_t* dex_pc_ptr)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Check that we are using the right interpreter.
  if (kIsDebugBuild && self->UseMterp() != CanUseMterp()) {
    // The flag might be currently being updated on all threads. Retry with lock.
    MutexLock tll_mu(self, *Locks::thread_list_lock_);
    DCHECK_EQ(self->UseMterp(), CanUseMterp());
  }
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  const Instruction* inst = Instruction::At(dex_pc_ptr);
  uint16_t inst_data = inst->Fetch16(0);
  // Only move-exception is allowed to execute with an exception pending.
  if (inst->Opcode(inst_data) == Instruction::MOVE_EXCEPTION) {
    self->AssertPendingException();
  } else {
    self->AssertNoPendingException();
  }
  if (kTraceExecutionEnabled) {
    uint32_t dex_pc = dex_pc_ptr - shadow_frame->GetDexInstructions();
    TraceExecution(*shadow_frame, inst, dex_pc);
  }
  if (kTestExportPC) {
    // Save invalid dex pc to force segfault if improperly used.
    shadow_frame->SetDexPCPtr(reinterpret_cast<uint16_t*>(kExportPCPoison));
  }
  if (MterpCheckHelper::kSlowMode) {
    shadow_frame->CheckConsistentVRegs();
  }
}
577
578extern "C" void MterpLogDivideByZeroException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700579 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800580 UNUSED(self);
581 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
582 uint16_t inst_data = inst->Fetch16(0);
583 LOG(INFO) << "DivideByZero: " << inst->Opcode(inst_data);
584}
585
586extern "C" void MterpLogArrayIndexException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700587 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800588 UNUSED(self);
589 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
590 uint16_t inst_data = inst->Fetch16(0);
591 LOG(INFO) << "ArrayIndex: " << inst->Opcode(inst_data);
592}
593
594extern "C" void MterpLogNegativeArraySizeException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700595 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800596 UNUSED(self);
597 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
598 uint16_t inst_data = inst->Fetch16(0);
599 LOG(INFO) << "NegativeArraySize: " << inst->Opcode(inst_data);
600}
601
602extern "C" void MterpLogNoSuchMethodException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700603 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800604 UNUSED(self);
605 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
606 uint16_t inst_data = inst->Fetch16(0);
607 LOG(INFO) << "NoSuchMethod: " << inst->Opcode(inst_data);
608}
609
610extern "C" void MterpLogExceptionThrownException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700611 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800612 UNUSED(self);
613 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
614 uint16_t inst_data = inst->Fetch16(0);
615 LOG(INFO) << "ExceptionThrown: " << inst->Opcode(inst_data);
616}
617
618extern "C" void MterpLogNullObjectException(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700619 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800620 UNUSED(self);
621 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
622 uint16_t inst_data = inst->Fetch16(0);
623 LOG(INFO) << "NullObject: " << inst->Opcode(inst_data);
624}
625
626extern "C" void MterpLogFallback(Thread* self, ShadowFrame* shadow_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700627 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800628 UNUSED(self);
629 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
630 uint16_t inst_data = inst->Fetch16(0);
631 LOG(INFO) << "Fallback: " << inst->Opcode(inst_data) << ", Suspend Pending?: "
632 << self->IsExceptionPending();
633}
634
Bill Buzbeefd522f92016-02-11 22:37:42 +0000635extern "C" void MterpLogOSR(Thread* self, ShadowFrame* shadow_frame, int32_t offset)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700636 REQUIRES_SHARED(Locks::mutator_lock_) {
Bill Buzbeefd522f92016-02-11 22:37:42 +0000637 UNUSED(self);
638 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
639 uint16_t inst_data = inst->Fetch16(0);
640 LOG(INFO) << "OSR: " << inst->Opcode(inst_data) << ", offset = " << offset;
641}
642
buzbee1452bee2015-03-06 14:43:04 -0800643extern "C" void MterpLogSuspendFallback(Thread* self, ShadowFrame* shadow_frame, uint32_t flags)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700644 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800645 UNUSED(self);
646 const Instruction* inst = Instruction::At(shadow_frame->GetDexPCPtr());
647 uint16_t inst_data = inst->Fetch16(0);
648 if (flags & kCheckpointRequest) {
649 LOG(INFO) << "Checkpoint fallback: " << inst->Opcode(inst_data);
650 } else if (flags & kSuspendRequest) {
651 LOG(INFO) << "Suspend fallback: " << inst->Opcode(inst_data);
Hiroshi Yamauchi30493242016-11-03 13:06:52 -0700652 } else if (flags & kEmptyCheckpointRequest) {
653 LOG(INFO) << "Empty checkpoint fallback: " << inst->Opcode(inst_data);
buzbee1452bee2015-03-06 14:43:04 -0800654 }
655}
656
Andreas Gampe67409972016-07-19 22:34:53 -0700657extern "C" size_t MterpSuspendCheck(Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700658 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800659 self->AllowThreadSuspension();
David Srbecky28f6cff2018-10-16 15:07:28 +0100660 return !self->UseMterp();
buzbee1452bee2015-03-06 14:43:04 -0800661}
662
// Execute single field access instruction (get/put, static/instance).
// The template arguments reduce this to fairly small amount of code.
// It requires the target object and field to be already resolved.
// PrimType is always an integral type; for object fields it only conveys the
// field size (see the static_assert at the accessor macros below).
template<typename PrimType, FindFieldType kAccessType>
ALWAYS_INLINE void MterpFieldAccess(Instruction* inst,
                                    uint16_t inst_data,
                                    ShadowFrame* shadow_frame,
                                    ObjPtr<mirror::Object> obj,
                                    MemberOffset offset,
                                    bool is_volatile)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  static_assert(std::is_integral<PrimType>::value, "Unexpected primitive type");
  constexpr bool kIsStatic = (kAccessType & FindFieldFlags::StaticBit) != 0;
  constexpr bool kIsPrimitive = (kAccessType & FindFieldFlags::PrimitiveBit) != 0;
  constexpr bool kIsRead = (kAccessType & FindFieldFlags::ReadBit) != 0;

  // The destination/source vreg is encoded differently for static (21c) and
  // instance (22c) field instructions.
  uint16_t vRegA = kIsStatic ? inst->VRegA_21c(inst_data) : inst->VRegA_22c(inst_data);
  if (kIsPrimitive) {
    if (kIsRead) {
      PrimType value = UNLIKELY(is_volatile)
          ? obj->GetFieldPrimitive<PrimType, /*kIsVolatile=*/ true>(offset)
          : obj->GetFieldPrimitive<PrimType, /*kIsVolatile=*/ false>(offset);
      if (sizeof(PrimType) == sizeof(uint64_t)) {
        shadow_frame->SetVRegLong(vRegA, value);  // Set two consecutive registers.
      } else {
        shadow_frame->SetVReg(vRegA, static_cast<int32_t>(value));  // Sign/zero extend.
      }
    } else {  // Write.
      uint64_t value = (sizeof(PrimType) == sizeof(uint64_t))
          ? shadow_frame->GetVRegLong(vRegA)
          : shadow_frame->GetVReg(vRegA);
      if (UNLIKELY(is_volatile)) {
        obj->SetFieldPrimitive<PrimType, /*kIsVolatile=*/ true>(offset, value);
      } else {
        obj->SetFieldPrimitive<PrimType, /*kIsVolatile=*/ false>(offset, value);
      }
    }
  } else {  // Object.
    if (kIsRead) {
      ObjPtr<mirror::Object> value = UNLIKELY(is_volatile)
          ? obj->GetFieldObjectVolatile<mirror::Object>(offset)
          : obj->GetFieldObject<mirror::Object>(offset);
      shadow_frame->SetVRegReference(vRegA, value);
    } else {  // Write.
      ObjPtr<mirror::Object> value = shadow_frame->GetVRegReference(vRegA);
      if (UNLIKELY(is_volatile)) {
        obj->SetFieldObjectVolatile</*kTransactionActive=*/ false>(offset, value);
      } else {
        obj->SetFieldObject</*kTransactionActive=*/ false>(offset, value);
      }
    }
  }
}
716
// Slow path for field access: resolves the field (which may allocate/throw)
// and then performs the access. Returns false if an exception is pending.
// Kept NO_INLINE so the fast path below can reach it via a tail call.
template<typename PrimType, FindFieldType kAccessType>
NO_INLINE bool MterpFieldAccessSlow(Instruction* inst,
                                    uint16_t inst_data,
                                    ShadowFrame* shadow_frame,
                                    Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr bool kIsStatic = (kAccessType & FindFieldFlags::StaticBit) != 0;
  constexpr bool kIsRead = (kAccessType & FindFieldFlags::ReadBit) != 0;

  // Update the dex pc in shadow frame, just in case anything throws.
  shadow_frame->SetDexPCPtr(reinterpret_cast<uint16_t*>(inst));
  ArtMethod* referrer = shadow_frame->GetMethod();
  uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c();
  ArtField* field = FindFieldFromCode<kAccessType, /* access_checks= */ false>(
      field_idx, referrer, self, sizeof(PrimType));
  if (UNLIKELY(field == nullptr)) {
    // Resolution failed; FindFieldFromCode has raised the exception.
    DCHECK(self->IsExceptionPending());
    return false;
  }
  // For static accesses the "object" is the declaring class; for instance
  // accesses it comes from the register encoded in the instruction.
  ObjPtr<mirror::Object> obj = kIsStatic
      ? field->GetDeclaringClass().Ptr()
      : shadow_frame->GetVRegReference(inst->VRegB_22c(inst_data));
  if (UNLIKELY(obj == nullptr)) {
    ThrowNullPointerExceptionForFieldAccess(field, kIsRead);
    return false;
  }
  MterpFieldAccess<PrimType, kAccessType>(
      inst, inst_data, shadow_frame, obj, field->GetOffset(), field->IsVolatile());
  return true;
}
747
// This method is called from assembly to handle field access instructions.
//
// This method is fairly hot. It is long, but it has been carefully optimized.
// It contains only fully inlined methods -> no spills -> no prologue/epilogue.
template<typename PrimType, FindFieldType kAccessType>
ALWAYS_INLINE bool MterpFieldAccessFast(Instruction* inst,
                                        uint16_t inst_data,
                                        ShadowFrame* shadow_frame,
                                        Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  constexpr bool kIsStatic = (kAccessType & FindFieldFlags::StaticBit) != 0;

  // Try to find the field in small thread-local cache first.
  InterpreterCache* tls_cache = self->GetInterpreterCache();
  size_t tls_value;
  if (LIKELY(tls_cache->Get(inst, &tls_value))) {
    // The meaning of the cache value is opcode-specific.
    // It is ArtField* for static fields and the raw offset for instance fields.
    size_t offset = kIsStatic
        ? reinterpret_cast<ArtField*>(tls_value)->GetOffset().SizeValue()
        : tls_value;
    if (kIsDebugBuild) {
      // Cross-check the cached offset against a full resolution (debug only).
      uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c();
      ArtField* field = FindFieldFromCode<kAccessType, /* access_checks= */ false>(
          field_idx, shadow_frame->GetMethod(), self, sizeof(PrimType));
      DCHECK_EQ(offset, field->GetOffset().SizeValue());
    }
    ObjPtr<mirror::Object> obj = kIsStatic
        ? reinterpret_cast<ArtField*>(tls_value)->GetDeclaringClass()
        : ObjPtr<mirror::Object>(shadow_frame->GetVRegReference(inst->VRegB_22c(inst_data)));
    if (LIKELY(obj != nullptr)) {
      // Cache hits are always non-volatile fields (see the Set calls below).
      MterpFieldAccess<PrimType, kAccessType>(
          inst, inst_data, shadow_frame, obj, MemberOffset(offset), /* is_volatile= */ false);
      return true;
    }
  }

  // This effectively inlines the fast path from ArtMethod::GetDexCache.
  ArtMethod* referrer = shadow_frame->GetMethod();
  if (LIKELY(!referrer->IsObsolete())) {
    // Avoid read barriers, since we need only the pointer to the native (non-movable)
    // DexCache field array which we can get even through from-space objects.
    ObjPtr<mirror::Class> klass = referrer->GetDeclaringClass<kWithoutReadBarrier>();
    ObjPtr<mirror::DexCache> dex_cache =
        klass->GetDexCache<kDefaultVerifyFlags, kWithoutReadBarrier>();

    // Try to find the desired field in DexCache.
    uint32_t field_idx = kIsStatic ? inst->VRegB_21c() : inst->VRegC_22c();
    ArtField* field = dex_cache->GetResolvedField(field_idx, kRuntimePointerSize);
    if (LIKELY(field != nullptr)) {
      // Static accesses additionally require the class to be visibly initialized.
      bool visibly_initialized = !kIsStatic || field->GetDeclaringClass()->IsVisiblyInitialized();
      if (LIKELY(visibly_initialized)) {
        DCHECK_EQ(field, (FindFieldFromCode<kAccessType, /* access_checks= */ false>(
            field_idx, referrer, self, sizeof(PrimType))));
        ObjPtr<mirror::Object> obj = kIsStatic
            ? field->GetDeclaringClass().Ptr()
            : shadow_frame->GetVRegReference(inst->VRegB_22c(inst_data));
        if (LIKELY(kIsStatic || obj != nullptr)) {
          // Only non-volatile fields are allowed in the thread-local cache.
          if (LIKELY(!field->IsVolatile())) {
            if (kIsStatic) {
              tls_cache->Set(inst, reinterpret_cast<uintptr_t>(field));
            } else {
              tls_cache->Set(inst, field->GetOffset().SizeValue());
            }
          }
          MterpFieldAccess<PrimType, kAccessType>(
              inst, inst_data, shadow_frame, obj, field->GetOffset(), field->IsVolatile());
          return true;
        }
      }
    }
  }

  // Slow path. Last and with identical arguments so that it becomes single instruction tail call.
  return MterpFieldAccessSlow<PrimType, kAccessType>(inst, inst_data, shadow_frame, self);
}
825
David Srbeckyce32c102018-08-31 07:21:07 +0100826#define MTERP_FIELD_ACCESSOR(Name, PrimType, AccessType) \
827extern "C" bool Name(Instruction* inst, uint16_t inst_data, ShadowFrame* sf, Thread* self) \
828 REQUIRES_SHARED(Locks::mutator_lock_) { \
829 return MterpFieldAccessFast<PrimType, AccessType>(inst, inst_data, sf, self); \
buzbee1452bee2015-03-06 14:43:04 -0800830}
831
David Srbeckyce32c102018-08-31 07:21:07 +0100832#define MTERP_FIELD_ACCESSORS_FOR_TYPE(Sufix, PrimType, Kind) \
833 MTERP_FIELD_ACCESSOR(MterpIGet##Sufix, PrimType, Instance##Kind##Read) \
834 MTERP_FIELD_ACCESSOR(MterpIPut##Sufix, PrimType, Instance##Kind##Write) \
835 MTERP_FIELD_ACCESSOR(MterpSGet##Sufix, PrimType, Static##Kind##Read) \
836 MTERP_FIELD_ACCESSOR(MterpSPut##Sufix, PrimType, Static##Kind##Write)
buzbee1452bee2015-03-06 14:43:04 -0800837
David Srbeckyce32c102018-08-31 07:21:07 +0100838MTERP_FIELD_ACCESSORS_FOR_TYPE(I8, int8_t, Primitive)
839MTERP_FIELD_ACCESSORS_FOR_TYPE(U8, uint8_t, Primitive)
840MTERP_FIELD_ACCESSORS_FOR_TYPE(I16, int16_t, Primitive)
841MTERP_FIELD_ACCESSORS_FOR_TYPE(U16, uint16_t, Primitive)
842MTERP_FIELD_ACCESSORS_FOR_TYPE(U32, uint32_t, Primitive)
843MTERP_FIELD_ACCESSORS_FOR_TYPE(U64, uint64_t, Primitive)
844MTERP_FIELD_ACCESSORS_FOR_TYPE(Obj, uint32_t, Object)
David Srbecky28dfc592018-08-22 15:29:09 +0100845
David Srbeckyce32c102018-08-31 07:21:07 +0100846// Check that the primitive type for Obj variant above is correct.
847// It really must be primitive type for the templates to compile.
848// In the case of objects, it is only used to get the field size.
849static_assert(kHeapReferenceSize == sizeof(uint32_t), "Unexpected kHeapReferenceSize");
David Srbecky28dfc592018-08-22 15:29:09 +0100850
David Srbeckyce32c102018-08-31 07:21:07 +0100851#undef MTERP_FIELD_ACCESSORS_FOR_TYPE
852#undef MTERP_FIELD_ACCESSOR
buzbeefa6adfd2017-02-22 13:40:59 -0800853
854extern "C" mirror::Object* artAGetObjectFromMterp(mirror::Object* arr,
855 int32_t index)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700856 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800857 if (UNLIKELY(arr == nullptr)) {
858 ThrowNullPointerExceptionFromInterpreter();
859 return nullptr;
860 }
Vladimir Marko423bebb2019-03-26 15:17:21 +0000861 ObjPtr<mirror::ObjectArray<mirror::Object>> array = arr->AsObjectArray<mirror::Object>();
buzbee1452bee2015-03-06 14:43:04 -0800862 if (LIKELY(array->CheckIsValidIndex(index))) {
Vladimir Marko423bebb2019-03-26 15:17:21 +0000863 return array->GetWithoutChecks(index).Ptr();
buzbee1452bee2015-03-06 14:43:04 -0800864 } else {
865 return nullptr;
866 }
867}
868
buzbeefa6adfd2017-02-22 13:40:59 -0800869extern "C" mirror::Object* artIGetObjectFromMterp(mirror::Object* obj,
870 uint32_t field_offset)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700871 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee76833da2016-01-13 13:06:22 -0800872 if (UNLIKELY(obj == nullptr)) {
873 ThrowNullPointerExceptionFromInterpreter();
874 return nullptr;
875 }
876 return obj->GetFieldObject<mirror::Object>(MemberOffset(field_offset));
877}
878
Bill Buzbee1d011d92016-04-04 16:59:29 +0000879/*
880 * Create a hotness_countdown based on the current method hotness_count and profiling
881 * mode. In short, determine how many hotness events we hit before reporting back
882 * to the full instrumentation via MterpAddHotnessBatch. Called once on entry to the method,
883 * and regenerated following batch updates.
884 */
Vladimir Markoa710d912017-09-12 14:56:07 +0100885extern "C" ssize_t MterpSetUpHotnessCountdown(ArtMethod* method,
886 ShadowFrame* shadow_frame,
887 Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700888 REQUIRES_SHARED(Locks::mutator_lock_) {
Bill Buzbee1d011d92016-04-04 16:59:29 +0000889 uint16_t hotness_count = method->GetCounter();
890 int32_t countdown_value = jit::kJitHotnessDisabled;
891 jit::Jit* jit = Runtime::Current()->GetJit();
892 if (jit != nullptr) {
Nicolas Geoffray0402f4b2018-11-29 19:18:46 +0000893 int32_t warm_threshold = jit->WarmMethodThreshold();
894 int32_t hot_threshold = jit->HotMethodThreshold();
895 int32_t osr_threshold = jit->OSRMethodThreshold();
Bill Buzbee1d011d92016-04-04 16:59:29 +0000896 if (hotness_count < warm_threshold) {
897 countdown_value = warm_threshold - hotness_count;
898 } else if (hotness_count < hot_threshold) {
899 countdown_value = hot_threshold - hotness_count;
900 } else if (hotness_count < osr_threshold) {
901 countdown_value = osr_threshold - hotness_count;
902 } else {
903 countdown_value = jit::kJitCheckForOSR;
904 }
Vladimir Markoa710d912017-09-12 14:56:07 +0100905 if (jit::Jit::ShouldUsePriorityThreadWeight(self)) {
Nicolas Geoffray274fe4a2016-04-12 16:33:24 +0100906 int32_t priority_thread_weight = jit->PriorityThreadWeight();
Calin Juravleb2771b42016-04-07 17:09:25 +0100907 countdown_value = std::min(countdown_value, countdown_value / priority_thread_weight);
908 }
Bill Buzbee1d011d92016-04-04 16:59:29 +0000909 }
910 /*
911 * The actual hotness threshold may exceed the range of our int16_t countdown value. This is
912 * not a problem, though. We can just break it down into smaller chunks.
913 */
914 countdown_value = std::min(countdown_value,
915 static_cast<int32_t>(std::numeric_limits<int16_t>::max()));
916 shadow_frame->SetCachedHotnessCountdown(countdown_value);
917 shadow_frame->SetHotnessCountdown(countdown_value);
918 return countdown_value;
919}
920
921/*
922 * Report a batch of hotness events to the instrumentation and then return the new
923 * countdown value to the next time we should report.
924 */
Andreas Gampe67409972016-07-19 22:34:53 -0700925extern "C" ssize_t MterpAddHotnessBatch(ArtMethod* method,
Bill Buzbee1d011d92016-04-04 16:59:29 +0000926 ShadowFrame* shadow_frame,
927 Thread* self)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700928 REQUIRES_SHARED(Locks::mutator_lock_) {
Bill Buzbee1d011d92016-04-04 16:59:29 +0000929 jit::Jit* jit = Runtime::Current()->GetJit();
930 if (jit != nullptr) {
931 int16_t count = shadow_frame->GetCachedHotnessCountdown() - shadow_frame->GetHotnessCountdown();
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700932 jit->AddSamples(self, method, count, /*with_backedges=*/ true);
Bill Buzbee1d011d92016-04-04 16:59:29 +0000933 }
Vladimir Markoa710d912017-09-12 14:56:07 +0100934 return MterpSetUpHotnessCountdown(method, shadow_frame, self);
Bill Buzbee1d011d92016-04-04 16:59:29 +0000935}
936
Andreas Gampe67409972016-07-19 22:34:53 -0700937extern "C" size_t MterpMaybeDoOnStackReplacement(Thread* self,
938 ShadowFrame* shadow_frame,
939 int32_t offset)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700940 REQUIRES_SHARED(Locks::mutator_lock_) {
buzbee42a09cb02017-02-01 09:08:31 -0800941 int16_t osr_countdown = shadow_frame->GetCachedHotnessCountdown() - 1;
942 bool did_osr = false;
943 /*
944 * To reduce the cost of polling the compiler to determine whether the requested OSR
945 * compilation has completed, only check every Nth time. NOTE: the "osr_countdown <= 0"
946 * condition is satisfied either by the decrement below or the initial setting of
947 * the cached countdown field to kJitCheckForOSR, which elsewhere is asserted to be -1.
948 */
949 if (osr_countdown <= 0) {
950 ArtMethod* method = shadow_frame->GetMethod();
951 JValue* result = shadow_frame->GetResultRegister();
952 uint32_t dex_pc = shadow_frame->GetDexPC();
953 jit::Jit* jit = Runtime::Current()->GetJit();
954 osr_countdown = jit::Jit::kJitRecheckOSRThreshold;
955 if (offset <= 0) {
956 // Keep updating hotness in case a compilation request was dropped. Eventually it will retry.
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700957 jit->AddSamples(self, method, osr_countdown, /*with_backedges=*/ true);
buzbee42a09cb02017-02-01 09:08:31 -0800958 }
959 did_osr = jit::Jit::MaybeDoOnStackReplacement(self, method, dex_pc, offset, result);
buzbee0e6aa6d2016-04-11 07:48:18 -0700960 }
buzbee42a09cb02017-02-01 09:08:31 -0800961 shadow_frame->SetCachedHotnessCountdown(osr_countdown);
Andreas Gampe7c5acbb2018-09-20 13:54:52 -0700962 return did_osr ? 1u : 0u;
Bill Buzbeefd522f92016-02-11 22:37:42 +0000963}
964
buzbee1452bee2015-03-06 14:43:04 -0800965} // namespace interpreter
966} // namespace art