blob: d5451ffded866a226eb9cad79e372f512139a135 [file] [log] [blame]
Andreas Gampe36a296f2017-06-13 14:11:11 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_
18#define ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_
19
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070020#include <cstdint>
Andreas Gampe36a296f2017-06-13 14:11:11 -070021#include <cstring>
Andreas Gampe36a296f2017-06-13 14:11:11 -070022#include <string>
23
24#include "base/macros.h"
25#include "base/mutex.h"
David Sehr9e734c72018-01-04 17:56:19 -080026#include "dex/dex_file.h"
Andreas Gampe36a296f2017-06-13 14:11:11 -070027#include "lock_count_data.h"
28#include "read_barrier.h"
29#include "stack_reference.h"
30#include "verify_object.h"
31
32namespace art {
33
34namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080035class Object;
Andreas Gampe36a296f2017-06-13 14:11:11 -070036} // namespace mirror
37
38class ArtMethod;
39class ShadowFrame;
40class Thread;
41union JValue;
42
43// Forward declaration. Just calls the destructor.
44struct ShadowFrameDeleter;
45using ShadowFrameAllocaUniquePtr = std::unique_ptr<ShadowFrame, ShadowFrameDeleter>;
46
// ShadowFrame has 2 possible layouts:
//  - interpreter - separate VRegs and reference arrays. References are in the reference array.
//  - JNI - just VRegs, but where every VReg holds a reference.
class ShadowFrame {
 public:
  // Compute size of ShadowFrame in bytes assuming it has a reference array.
  // Layout: fixed header (sizeof(ShadowFrame)), then num_vregs 4-byte vregs,
  // then num_vregs stack references (see the vregs_ comment at the bottom).
  static size_t ComputeSize(uint32_t num_vregs) {
    return sizeof(ShadowFrame) + (sizeof(uint32_t) * num_vregs) +
           (sizeof(StackReference<mirror::Object>) * num_vregs);
  }

  // Create ShadowFrame in heap for deoptimization.
  // Must be released with DeleteDeoptimizedFrame (raw operator delete would
  // skip the explicit destructor call and mismatch the uint8_t[] allocation).
  static ShadowFrame* CreateDeoptimizedFrame(uint32_t num_vregs, ShadowFrame* link,
                                             ArtMethod* method, uint32_t dex_pc) {
    uint8_t* memory = new uint8_t[ComputeSize(num_vregs)];
    return CreateShadowFrameImpl(num_vregs, link, method, dex_pc, memory);
  }

  // Delete a ShadowFrame allocated on the heap for deoptimization.
  // Counterpart of CreateDeoptimizedFrame: destructs in place, then frees the
  // underlying byte array that the frame was placement-new'ed into.
  static void DeleteDeoptimizedFrame(ShadowFrame* sf) {
    sf->~ShadowFrame();  // Explicitly destruct.
    uint8_t* memory = reinterpret_cast<uint8_t*>(sf);
    delete[] memory;
  }

  // Create a shadow frame in a fresh alloca. This needs to be in the context of the caller.
  // Inlining doesn't work, the compiler will still undo the alloca. So this needs to be a macro.
  // Expands to a ShadowFrameAllocaUniquePtr (statement-expression); the deleter
  // only runs the destructor since the alloca storage dies with the caller's frame.
#define CREATE_SHADOW_FRAME(num_vregs, link, method, dex_pc) ({                              \
    size_t frame_size = ShadowFrame::ComputeSize(num_vregs);                                 \
    void* alloca_mem = alloca(frame_size);                                                   \
    ShadowFrameAllocaUniquePtr(                                                              \
        ShadowFrame::CreateShadowFrameImpl((num_vregs), (link), (method), (dex_pc),          \
                                           (alloca_mem)));                                   \
    })

  ~ShadowFrame() {}

  // TODO(iam): Clean references array up since they're always there,
  // we don't need to do conditionals.
  bool HasReferenceArray() const {
    return true;
  }

  uint32_t NumberOfVRegs() const {
    return number_of_vregs_;
  }

  // Current dex pc, in code units from the start of the code item. While the
  // frame is executing, the pc is tracked via dex_pc_ptr_; otherwise the cached
  // dex_pc_ value (set by SetDexPC) is returned.
  uint32_t GetDexPC() const {
    return (dex_pc_ptr_ == nullptr) ? dex_pc_ : dex_pc_ptr_ - dex_instructions_;
  }

  int16_t GetCachedHotnessCountdown() const {
    return cached_hotness_countdown_;
  }

  void SetCachedHotnessCountdown(int16_t cached_hotness_countdown) {
    cached_hotness_countdown_ = cached_hotness_countdown;
  }

  int16_t GetHotnessCountdown() const {
    return hotness_countdown_;
  }

  void SetHotnessCountdown(int16_t hotness_countdown) {
    hotness_countdown_ = hotness_countdown;
  }

  // Sets the cached dex pc and invalidates the pointer form so GetDexPC()
  // reports the new value.
  void SetDexPC(uint32_t dex_pc) {
    dex_pc_ = dex_pc;
    dex_pc_ptr_ = nullptr;
  }

  // Link to the caller's shadow frame, or null for the bottom frame.
  ShadowFrame* GetLink() const {
    return link_;
  }

  void SetLink(ShadowFrame* frame) {
    DCHECK_NE(this, frame);
    link_ = frame;
  }

  int32_t GetVReg(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const int32_t*>(vreg);
  }

  // Shorts are extended to Ints in VRegs. Interpreter intrinsics needs them as shorts.
  int16_t GetVRegShort(size_t i) const {
    return static_cast<int16_t>(GetVReg(i));
  }

  uint32_t* GetVRegAddr(size_t i) {
    return &vregs_[i];
  }

  // Address of the reference-array slot mirroring vreg i (second half of vregs_).
  uint32_t* GetShadowRefAddr(size_t i) {
    DCHECK(HasReferenceArray());
    DCHECK_LT(i, NumberOfVRegs());
    return &vregs_[i + NumberOfVRegs()];
  }

  // Base of the method's dex instructions; dex_pc_ptr_ points into this array.
  const uint16_t* GetDexInstructions() const {
    return dex_instructions_;
  }

  float GetVRegFloat(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    // NOTE: Strict-aliasing?
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const float*>(vreg);
  }

  // Reads the wide value spanning vregs i and i+1. Wide values only get 4-byte
  // alignment in the vreg array, hence the aligned(4) typedef.
  int64_t GetVRegLong(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    typedef const int64_t unaligned_int64 __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_int64*>(vreg);
  }

  double GetVRegDouble(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    typedef const double unaligned_double __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_double*>(vreg);
  }

  // Look up the reference given its virtual register number.
  // If this returns non-null then this does not mean the vreg is currently a reference
  // on non-moving collectors. Check that the raw reg with GetVReg is equal to this if not certain.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  mirror::Object* GetVRegReference(size_t i) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, NumberOfVRegs());
    mirror::Object* ref;
    if (HasReferenceArray()) {
      ref = References()[i].AsMirrorPtr();
    } else {
      const uint32_t* vreg_ptr = &vregs_[i];
      ref = reinterpret_cast<const StackReference<mirror::Object>*>(vreg_ptr)->AsMirrorPtr();
    }
    ReadBarrier::MaybeAssertToSpaceInvariant(ref);
    if (kVerifyFlags & kVerifyReads) {
      VerifyObject(ref);
    }
    return ref;
  }

  // Get view of vregs as range of consecutive arguments starting at i.
  uint32_t* GetVRegArgs(size_t i) {
    return &vregs_[i];
  }

  void SetVReg(size_t i, int32_t val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<int32_t*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  void SetVRegFloat(size_t i, float val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<float*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  // Writes the wide value into vregs i and i+1.
  // NOTE(review): only i is DCHECK'd; References()[i + 1] below relies on the
  // caller guaranteeing i + 1 < NumberOfVRegs() — confirm callers uphold this.
  void SetVRegLong(size_t i, int64_t val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    typedef int64_t unaligned_int64 __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_int64*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();
    }
  }

  // See SetVRegLong regarding the unchecked i + 1 access.
  void SetVRegDouble(size_t i, double val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    typedef double unaligned_double __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_double*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();
    }
  }

  // Stores a reference into vreg i, keeping the raw vreg slot and the
  // reference-array slot in sync.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetVRegReference(size_t i, mirror::Object* val) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, NumberOfVRegs());
    if (kVerifyFlags & kVerifyWrites) {
      VerifyObject(val);
    }
    ReadBarrier::MaybeAssertToSpaceInvariant(val);
    uint32_t* vreg = &vregs_[i];
    reinterpret_cast<StackReference<mirror::Object>*>(vreg)->Assign(val);
    if (HasReferenceArray()) {
      References()[i].Assign(val);
    }
  }

  // Replaces the method; both the old and new method must be non-null.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_) {
    DCHECK(method != nullptr);
    DCHECK(method_ != nullptr);
    method_ = method;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(method_ != nullptr);
    return method_;
  }

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject(uint16_t num_ins) const REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether the given stack reference points into this frame's storage.
  // NOTE(review): both branches index [NumberOfVRegs() - 1]; looks like this
  // assumes NumberOfVRegs() > 0 — confirm no zero-vreg frames reach here.
  bool Contains(StackReference<mirror::Object>* shadow_frame_entry_obj) const {
    if (HasReferenceArray()) {
      return ((&References()[0] <= shadow_frame_entry_obj) &&
              (shadow_frame_entry_obj <= (&References()[NumberOfVRegs() - 1])));
    } else {
      uint32_t* shadow_frame_entry = reinterpret_cast<uint32_t*>(shadow_frame_entry_obj);
      return ((&vregs_[0] <= shadow_frame_entry) &&
              (shadow_frame_entry <= (&vregs_[NumberOfVRegs() - 1])));
    }
  }

  LockCountData& GetLockCountData() {
    return lock_count_data_;
  }

  // Field offsets below expose the frame layout to code outside this class
  // (presumably interpreter/JIT stubs — keep member order stable).
  static size_t LockCountDataOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, lock_count_data_);
  }

  static size_t LinkOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, link_);
  }

  static size_t MethodOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, method_);
  }

  static size_t DexPCOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_);
  }

  static size_t NumberOfVRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, number_of_vregs_);
  }

  static size_t VRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, vregs_);
  }

  static size_t ResultRegisterOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, result_register_);
  }

  static size_t DexPCPtrOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_ptr_);
  }

  static size_t DexInstructionsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_instructions_);
  }

  static size_t CachedHotnessCountdownOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, cached_hotness_countdown_);
  }

  static size_t HotnessCountdownOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, hotness_countdown_);
  }

  // Create ShadowFrame for interpreter using provided memory.
  // `memory` must be at least ComputeSize(num_vregs) bytes; the frame is
  // placement-new'ed into it, so storage ownership stays with the caller.
  static ShadowFrame* CreateShadowFrameImpl(uint32_t num_vregs,
                                            ShadowFrame* link,
                                            ArtMethod* method,
                                            uint32_t dex_pc,
                                            void* memory) {
    return new (memory) ShadowFrame(num_vregs, link, method, dex_pc, true);
  }

  const uint16_t* GetDexPCPtr() {
    return dex_pc_ptr_;
  }

  void SetDexPCPtr(uint16_t* dex_pc_ptr) {
    dex_pc_ptr_ = dex_pc_ptr;
  }

  JValue* GetResultRegister() {
    return result_register_;
  }

  // Whether a frame-pop event should be reported when this frame is popped.
  bool NeedsNotifyPop() const {
    return needs_notify_pop_;
  }

  void SetNotifyPop(bool notify) {
    needs_notify_pop_ = notify;
  }

 private:
  // Private: construct only through CreateShadowFrameImpl / the factory
  // helpers above, which provide suitably-sized trailing storage for vregs_.
  ShadowFrame(uint32_t num_vregs, ShadowFrame* link, ArtMethod* method,
              uint32_t dex_pc, bool has_reference_array)
      : link_(link),
        method_(method),
        result_register_(nullptr),
        dex_pc_ptr_(nullptr),
        dex_instructions_(nullptr),
        number_of_vregs_(num_vregs),
        dex_pc_(dex_pc),
        cached_hotness_countdown_(0),
        hotness_countdown_(0),
        needs_notify_pop_(0) {
    // TODO(iam): Remove this parameter, it's an artifact of portable removal
    DCHECK(has_reference_array);
    if (has_reference_array) {
      memset(vregs_, 0, num_vregs * (sizeof(uint32_t) + sizeof(StackReference<mirror::Object>)));
    } else {
      memset(vregs_, 0, num_vregs * sizeof(uint32_t));
    }
  }

  // Reference array lives immediately after the raw vregs (see vregs_ comment).
  const StackReference<mirror::Object>* References() const {
    DCHECK(HasReferenceArray());
    const uint32_t* vreg_end = &vregs_[NumberOfVRegs()];
    return reinterpret_cast<const StackReference<mirror::Object>*>(vreg_end);
  }

  StackReference<mirror::Object>* References() {
    return const_cast<StackReference<mirror::Object>*>(
        const_cast<const ShadowFrame*>(this)->References());
  }

  // Link to previous shadow frame or null.
  ShadowFrame* link_;
  ArtMethod* method_;
  JValue* result_register_;
  // Current pc as a pointer into dex_instructions_, or null when dex_pc_ holds it.
  const uint16_t* dex_pc_ptr_;
  // Dex instruction base of the code item.
  const uint16_t* dex_instructions_;
  LockCountData lock_count_data_;  // This may contain GC roots when lock counting is active.
  const uint32_t number_of_vregs_;
  uint32_t dex_pc_;
  int16_t cached_hotness_countdown_;
  int16_t hotness_countdown_;
  // TODO Might be worth it to try to bit-pack this into some other field to reduce stack usage.
  // NB alignment requires that this field takes 4 bytes. Only 1 bit is actually ever used.
  bool needs_notify_pop_;

  // This is a two-part array:
  //  - [0..number_of_vregs) holds the raw virtual registers, and each element here is always 4
  //    bytes.
  //  - [number_of_vregs..number_of_vregs*2) holds only reference registers. Each element here is
  //    ptr-sized.
  // In other words when a primitive is stored in vX, the second (reference) part of the array will
  // be null. When a reference is stored in vX, the second (reference) part of the array will be a
  // copy of vX.
  uint32_t vregs_[0];

  DISALLOW_IMPLICIT_CONSTRUCTORS(ShadowFrame);
};
425
426struct ShadowFrameDeleter {
427 inline void operator()(ShadowFrame* frame) {
428 if (frame != nullptr) {
429 frame->~ShadowFrame();
430 }
431 }
432};
433
434} // namespace art
435
436#endif // ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_