blob: b438074d6d23225812cabbe164eb2665caeadf1c [file] [log] [blame]
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_STACK_MAP_H_
18#define ART_RUNTIME_STACK_MAP_H_
19
Andreas Gampe69489fa2017-06-08 18:03:25 -070020#include <limits>
21
David Srbeckyf6ba5b32018-06-23 22:05:49 +010022#include "arch/instruction_set.h"
David Sehr1ce2b3b2018-04-05 11:02:03 -070023#include "base/bit_memory_region.h"
David Srbecky052f8ca2018-04-26 15:42:54 +010024#include "base/bit_table.h"
Vladimir Marko80afd022015-05-19 18:08:00 +010025#include "base/bit_utils.h"
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070026#include "base/bit_vector.h"
David Sehr67bf42e2018-02-26 16:43:04 -080027#include "base/leb128.h"
David Sehr1ce2b3b2018-04-05 11:02:03 -070028#include "base/memory_region.h"
David Sehr9e734c72018-01-04 17:56:19 -080029#include "dex/dex_file_types.h"
David Srbecky71ec1cc2018-05-18 15:57:25 +010030#include "dex_register_location.h"
David Srbeckyf6ba5b32018-06-23 22:05:49 +010031#include "quick/quick_method_frame_info.h"
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +010032
33namespace art {
34
David Srbeckyf6ba5b32018-06-23 22:05:49 +010035class OatQuickMethodHeader;
Vladimir Marko8f1e08a2015-06-26 12:06:30 +010036class VariableIndentationOutputStream;
37
// Size of a frame slot, in bytes. Deliberately declared as a signed type
// (ssize_t) so that arithmetic mixing it with int32_t (signed) values does
// not trigger signed/unsigned conversion issues.
static constexpr ssize_t kFrameSlotSize = 4;

// The delta compression of dex register maps means we need to scan the stackmaps backwards.
// We compress the data in such a way so that there is an upper bound on the search distance.
// Max distance 0 means each stack map must be fully defined and no scanning back is allowed.
// If this value is changed, the oat file version should be incremented (for DCHECK to pass).
static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;
48
Nicolas Geoffray5d37c152017-01-12 13:25:19 +000049class ArtMethod;
Nicolas Geoffray004c2302015-03-20 10:06:38 +000050class CodeInfo;
David Srbecky86decb62018-06-05 06:41:10 +010051class Stats;
Nicolas Geoffray004c2302015-03-20 10:06:38 +000052
David Srbecky71ec1cc2018-05-18 15:57:25 +010053std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +010054
David Srbecky71ec1cc2018-05-18 15:57:25 +010055// Information on Dex register locations for a specific PC.
56// Effectively just a convenience wrapper for DexRegisterLocation vector.
57// If the size is small enough, it keeps the data on the stack.
David Srbeckye1402122018-06-13 18:20:45 +010058// TODO: Replace this with generic purpose "small-vector" implementation.
Roland Levillaina552e1c2015-03-26 15:01:03 +000059class DexRegisterMap {
60 public:
David Srbecky6de88332018-06-03 12:00:11 +010061 using iterator = DexRegisterLocation*;
David Srbeckye1402122018-06-13 18:20:45 +010062 using const_iterator = const DexRegisterLocation*;
David Srbecky6de88332018-06-03 12:00:11 +010063
64 // Create map for given number of registers and initialize them to the given value.
65 DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
David Srbecky71ec1cc2018-05-18 15:57:25 +010066 if (count_ <= kSmallCount) {
David Srbecky6de88332018-06-03 12:00:11 +010067 std::fill_n(regs_small_.begin(), count, value);
David Srbecky71ec1cc2018-05-18 15:57:25 +010068 } else {
David Srbecky6de88332018-06-03 12:00:11 +010069 regs_large_.resize(count, value);
David Srbecky71ec1cc2018-05-18 15:57:25 +010070 }
Roland Levillaina552e1c2015-03-26 15:01:03 +000071 }
72
David Srbecky71ec1cc2018-05-18 15:57:25 +010073 DexRegisterLocation* data() {
74 return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
75 }
David Srbeckye1402122018-06-13 18:20:45 +010076 const DexRegisterLocation* data() const {
77 return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
78 }
Roland Levillaina552e1c2015-03-26 15:01:03 +000079
David Srbecky6de88332018-06-03 12:00:11 +010080 iterator begin() { return data(); }
81 iterator end() { return data() + count_; }
David Srbeckye1402122018-06-13 18:20:45 +010082 const_iterator begin() const { return data(); }
83 const_iterator end() const { return data() + count_; }
David Srbecky71ec1cc2018-05-18 15:57:25 +010084 size_t size() const { return count_; }
David Srbeckyfd89b072018-06-03 12:00:22 +010085 bool empty() const { return count_ == 0; }
David Srbecky71ec1cc2018-05-18 15:57:25 +010086
David Srbeckye1402122018-06-13 18:20:45 +010087 DexRegisterLocation& operator[](size_t index) {
David Srbecky71ec1cc2018-05-18 15:57:25 +010088 DCHECK_LT(index, count_);
David Srbeckye1402122018-06-13 18:20:45 +010089 return data()[index];
David Srbecky71ec1cc2018-05-18 15:57:25 +010090 }
David Srbeckye1402122018-06-13 18:20:45 +010091 const DexRegisterLocation& operator[](size_t index) const {
92 DCHECK_LT(index, count_);
93 return data()[index];
Roland Levillaina552e1c2015-03-26 15:01:03 +000094 }
95
David Srbecky71ec1cc2018-05-18 15:57:25 +010096 size_t GetNumberOfLiveDexRegisters() const {
David Srbeckye1402122018-06-13 18:20:45 +010097 return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
Roland Levillaina552e1c2015-03-26 15:01:03 +000098 }
99
David Srbecky71ec1cc2018-05-18 15:57:25 +0100100 bool HasAnyLiveDexRegisters() const {
David Srbeckye1402122018-06-13 18:20:45 +0100101 return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
David Srbecky21d45b42018-05-30 06:35:05 +0100102 }
103
David Srbeckye1402122018-06-13 18:20:45 +0100104 void Dump(VariableIndentationOutputStream* vios) const;
105
Roland Levillaina552e1c2015-03-26 15:01:03 +0000106 private:
David Srbecky71ec1cc2018-05-18 15:57:25 +0100107 // Store the data inline if the number of registers is small to avoid memory allocations.
108 // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
109 static constexpr size_t kSmallCount = 16;
110 size_t count_;
111 std::array<DexRegisterLocation, kSmallCount> regs_small_;
112 dchecked_vector<DexRegisterLocation> regs_large_;
Roland Levillaina552e1c2015-03-26 15:01:03 +0000113};
114
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100115/**
116 * A Stack Map holds compilation information for a specific PC necessary for:
117 * - Mapping it to a dex PC,
118 * - Knowing which stack entries are objects,
119 * - Knowing which registers hold objects,
120 * - Knowing the inlining information,
121 * - Knowing the values of dex registers.
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100122 */
David Srbeckycf7833e2018-06-14 16:45:22 +0100123class StackMap : public BitTableAccessor<8> {
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100124 public:
David Srbecky50fac062018-06-13 18:55:35 +0100125 enum Kind {
126 Default = -1,
127 Catch = 0,
128 OSR = 1,
129 Debug = 2,
130 };
David Srbecky42deda82018-08-10 11:23:27 +0100131 BIT_TABLE_HEADER(StackMap)
David Srbecky50fac062018-06-13 18:55:35 +0100132 BIT_TABLE_COLUMN(0, Kind)
133 BIT_TABLE_COLUMN(1, PackedNativePc)
134 BIT_TABLE_COLUMN(2, DexPc)
135 BIT_TABLE_COLUMN(3, RegisterMaskIndex)
136 BIT_TABLE_COLUMN(4, StackMaskIndex)
137 BIT_TABLE_COLUMN(5, InlineInfoIndex)
138 BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
139 BIT_TABLE_COLUMN(7, DexRegisterMapIndex)
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100140
David Srbecky052f8ca2018-04-26 15:42:54 +0100141 ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
David Srbeckycf7833e2018-06-14 16:45:22 +0100142 return UnpackNativePc(GetPackedNativePc(), instruction_set);
David Brazdilf677ebf2015-05-29 16:29:43 +0100143 }
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100144
David Srbeckyd97e0822018-06-03 12:00:24 +0100145 ALWAYS_INLINE bool HasInlineInfo() const {
146 return HasInlineInfoIndex();
147 }
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100148
David Srbeckyd97e0822018-06-03 12:00:24 +0100149 ALWAYS_INLINE bool HasDexRegisterMap() const {
150 return HasDexRegisterMapIndex();
151 }
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100152
David Srbeckyd02b23f2018-05-29 23:27:22 +0100153 static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
David Srbeckyd775f962018-05-30 18:12:52 +0100154 DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
David Srbeckyd02b23f2018-05-29 23:27:22 +0100155 return native_pc / GetInstructionSetInstructionAlignment(isa);
156 }
157
158 static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
159 uint32_t native_pc = packed_native_pc * GetInstructionSetInstructionAlignment(isa);
160 DCHECK_EQ(native_pc / GetInstructionSetInstructionAlignment(isa), packed_native_pc);
161 return native_pc;
162 }
163
Vladimir Marko8f1e08a2015-06-26 12:06:30 +0100164 void Dump(VariableIndentationOutputStream* vios,
Roland Levillainf2650d12015-05-28 14:53:28 +0100165 const CodeInfo& code_info,
166 uint32_t code_offset,
David Srbecky71ec1cc2018-05-18 15:57:25 +0100167 InstructionSet instruction_set) const;
David Srbecky61b28a12016-02-25 21:55:03 +0000168};
169
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100170/**
David Srbecky052f8ca2018-04-26 15:42:54 +0100171 * Inline information for a specific PC.
172 * The row referenced from the StackMap holds information at depth 0.
173 * Following rows hold information for further depths.
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100174 */
David Srbeckycf7833e2018-06-14 16:45:22 +0100175class InlineInfo : public BitTableAccessor<6> {
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100176 public:
David Srbecky42deda82018-08-10 11:23:27 +0100177 BIT_TABLE_HEADER(InlineInfo)
David Srbeckyd97e0822018-06-03 12:00:24 +0100178 BIT_TABLE_COLUMN(0, IsLast) // Determines if there are further rows for further depths.
179 BIT_TABLE_COLUMN(1, DexPc)
180 BIT_TABLE_COLUMN(2, MethodInfoIndex)
181 BIT_TABLE_COLUMN(3, ArtMethodHi) // High bits of ArtMethod*.
182 BIT_TABLE_COLUMN(4, ArtMethodLo) // Low bits of ArtMethod*.
David Srbecky6de88332018-06-03 12:00:11 +0100183 BIT_TABLE_COLUMN(5, NumberOfDexRegisters) // Includes outer levels and the main method.
David Srbeckyd97e0822018-06-03 12:00:24 +0100184
David Srbecky052f8ca2018-04-26 15:42:54 +0100185 static constexpr uint32_t kLast = -1;
186 static constexpr uint32_t kMore = 0;
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100187
David Srbecky6e69e522018-06-03 12:00:14 +0100188 bool EncodesArtMethod() const {
David Srbeckyd97e0822018-06-03 12:00:24 +0100189 return HasArtMethodLo();
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100190 }
191
David Srbecky6e69e522018-06-03 12:00:14 +0100192 ArtMethod* GetArtMethod() const {
David Srbeckyd97e0822018-06-03 12:00:24 +0100193 uint64_t lo = GetArtMethodLo();
194 uint64_t hi = GetArtMethodHi();
David Srbecky71ec1cc2018-05-18 15:57:25 +0100195 return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100196 }
197
Vladimir Marko8f1e08a2015-06-26 12:06:30 +0100198 void Dump(VariableIndentationOutputStream* vios,
David Srbecky61b28a12016-02-25 21:55:03 +0000199 const CodeInfo& info,
David Srbecky8cd54542018-07-15 23:58:44 +0100200 const StackMap& stack_map) const;
Mathieu Chartier575d3e62017-02-06 11:00:40 -0800201};
202
David Srbecky42deda82018-08-10 11:23:27 +0100203class StackMask : public BitTableAccessor<1> {
David Srbecky86decb62018-06-05 06:41:10 +0100204 public:
David Srbecky42deda82018-08-10 11:23:27 +0100205 BIT_TABLE_HEADER(StackMask)
206 BIT_TABLE_COLUMN(0, Mask)
207};
208
209class DexRegisterMask : public BitTableAccessor<1> {
210 public:
211 BIT_TABLE_HEADER(DexRegisterMask)
David Srbecky86decb62018-06-05 06:41:10 +0100212 BIT_TABLE_COLUMN(0, Mask)
213};
214
David Srbeckycf7833e2018-06-14 16:45:22 +0100215class DexRegisterMapInfo : public BitTableAccessor<1> {
David Srbecky86decb62018-06-05 06:41:10 +0100216 public:
David Srbecky42deda82018-08-10 11:23:27 +0100217 BIT_TABLE_HEADER(DexRegisterMapInfo)
David Srbecky86decb62018-06-05 06:41:10 +0100218 BIT_TABLE_COLUMN(0, CatalogueIndex)
219};
220
David Srbeckycf7833e2018-06-14 16:45:22 +0100221class DexRegisterInfo : public BitTableAccessor<2> {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100222 public:
David Srbecky42deda82018-08-10 11:23:27 +0100223 BIT_TABLE_HEADER(DexRegisterInfo)
David Srbeckyd97e0822018-06-03 12:00:24 +0100224 BIT_TABLE_COLUMN(0, Kind)
225 BIT_TABLE_COLUMN(1, PackedValue)
David Srbecky71ec1cc2018-05-18 15:57:25 +0100226
227 ALWAYS_INLINE DexRegisterLocation GetLocation() const {
David Srbeckyd97e0822018-06-03 12:00:24 +0100228 DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(GetKind());
229 return DexRegisterLocation(kind, UnpackValue(kind, GetPackedValue()));
David Srbecky71ec1cc2018-05-18 15:57:25 +0100230 }
231
232 static uint32_t PackValue(DexRegisterLocation::Kind kind, uint32_t value) {
233 uint32_t packed_value = value;
234 if (kind == DexRegisterLocation::Kind::kInStack) {
235 DCHECK(IsAligned<kFrameSlotSize>(packed_value));
236 packed_value /= kFrameSlotSize;
237 }
238 return packed_value;
239 }
240
241 static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
242 uint32_t value = packed_value;
243 if (kind == DexRegisterLocation::Kind::kInStack) {
244 value *= kFrameSlotSize;
245 }
246 return value;
247 }
248};
249
David Srbecky4b59d102018-05-29 21:46:10 +0000250// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
251// therefore it is worth encoding the mask as value+shift.
David Srbeckycf7833e2018-06-14 16:45:22 +0100252class RegisterMask : public BitTableAccessor<2> {
David Srbecky4b59d102018-05-29 21:46:10 +0000253 public:
David Srbecky42deda82018-08-10 11:23:27 +0100254 BIT_TABLE_HEADER(RegisterMask)
David Srbeckyd97e0822018-06-03 12:00:24 +0100255 BIT_TABLE_COLUMN(0, Value)
256 BIT_TABLE_COLUMN(1, Shift)
David Srbecky4b59d102018-05-29 21:46:10 +0000257
258 ALWAYS_INLINE uint32_t GetMask() const {
David Srbeckyd97e0822018-06-03 12:00:24 +0100259 return GetValue() << GetShift();
David Srbecky4b59d102018-05-29 21:46:10 +0000260 }
261};
262
David Srbecky8cd54542018-07-15 23:58:44 +0100263// Method indices are not very dedup friendly.
264// Separating them greatly improves dedup efficiency of the other tables.
265class MethodInfo : public BitTableAccessor<1> {
266 public:
David Srbecky42deda82018-08-10 11:23:27 +0100267 BIT_TABLE_HEADER(MethodInfo)
David Srbecky8cd54542018-07-15 23:58:44 +0100268 BIT_TABLE_COLUMN(0, MethodIndex)
269};
270
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100271/**
272 * Wrapper around all compiler information collected for a method.
David Srbecky71ec1cc2018-05-18 15:57:25 +0100273 * See the Decode method at the end for the precise binary format.
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100274 */
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100275class CodeInfo {
276 public:
David Srbeckyd1606412018-07-31 15:05:14 +0100277 class Deduper {
278 public:
279 explicit Deduper(std::vector<uint8_t>* output) : writer_(output) {
280 DCHECK_EQ(output->size(), 0u);
281 }
282
283 // Copy CodeInfo into output while de-duplicating the internal bit tables.
284 // It returns the byte offset of the copied CodeInfo within the output.
285 size_t Dedupe(const uint8_t* code_info);
286
287 private:
David Srbeckyd1606412018-07-31 15:05:14 +0100288 BitMemoryWriter<std::vector<uint8_t>> writer_;
289
290 // Deduplicate at BitTable level. The value is bit offset within the output.
291 std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> dedupe_map_;
292 };
293
David Srbecky6ee06e92018-07-25 21:45:54 +0100294 enum DecodeFlags {
David Srbecky42deda82018-08-10 11:23:27 +0100295 AllTables = 0,
David Srbeckya2d29a32018-08-03 11:06:38 +0100296 // Limits the decoding only to the data needed by GC.
297 GcMasksOnly = 1,
David Srbecky6ee06e92018-07-25 21:45:54 +0100298 // Limits the decoding only to the main stack map table and inline info table.
299 // This is sufficient for many use cases and makes the header decoding faster.
David Srbeckya2d29a32018-08-03 11:06:38 +0100300 InlineInfoOnly = 2,
David Srbecky6ee06e92018-07-25 21:45:54 +0100301 };
302
David Srbecky2259f1c2019-01-16 23:18:30 +0000303 CodeInfo() {}
304
David Srbecky42deda82018-08-10 11:23:27 +0100305 explicit CodeInfo(const uint8_t* data, DecodeFlags flags = AllTables) {
David Srbecky6ee06e92018-07-25 21:45:54 +0100306 Decode(reinterpret_cast<const uint8_t*>(data), flags);
Nicolas Geoffray39468442014-09-02 15:17:15 +0100307 }
308
David Srbecky42deda82018-08-10 11:23:27 +0100309 explicit CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags = AllTables);
Nicolas Geoffray896f8f72015-03-30 15:44:25 +0100310
David Srbecky052f8ca2018-04-26 15:42:54 +0100311 size_t Size() const {
David Srbeckya38e6cf2018-06-26 18:13:49 +0100312 return BitsToBytesRoundUp(size_in_bits_);
Roland Levillaina552e1c2015-03-26 15:01:03 +0000313 }
314
David Srbecky93bd3612018-07-02 19:30:18 +0100315 ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
316 return stack_maps_;
Nicolas Geoffray39468442014-09-02 15:17:15 +0100317 }
318
David Srbecky052f8ca2018-04-26 15:42:54 +0100319 ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
David Srbeckycf7833e2018-06-14 16:45:22 +0100320 return stack_maps_.GetRow(index);
David Srbecky45aa5982016-03-18 02:15:09 +0000321 }
322
David Srbecky052f8ca2018-04-26 15:42:54 +0100323 BitMemoryRegion GetStackMask(size_t index) const {
David Srbecky4b59d102018-05-29 21:46:10 +0000324 return stack_masks_.GetBitMemoryRegion(index);
Mathieu Chartier1a20b682017-01-31 14:25:16 -0800325 }
326
David Srbecky052f8ca2018-04-26 15:42:54 +0100327 BitMemoryRegion GetStackMaskOf(const StackMap& stack_map) const {
David Srbecky4b59d102018-05-29 21:46:10 +0000328 uint32_t index = stack_map.GetStackMaskIndex();
329 return (index == StackMap::kNoValue) ? BitMemoryRegion() : GetStackMask(index);
Mathieu Chartier1a20b682017-01-31 14:25:16 -0800330 }
331
David Srbecky052f8ca2018-04-26 15:42:54 +0100332 uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
David Srbecky4b59d102018-05-29 21:46:10 +0000333 uint32_t index = stack_map.GetRegisterMaskIndex();
David Srbeckycf7833e2018-06-14 16:45:22 +0100334 return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
Nicolas Geoffray39468442014-09-02 15:17:15 +0100335 }
336
David Srbecky052f8ca2018-04-26 15:42:54 +0100337 uint32_t GetNumberOfLocationCatalogEntries() const {
David Srbecky71ec1cc2018-05-18 15:57:25 +0100338 return dex_register_catalog_.NumRows();
Roland Levillaina552e1c2015-03-26 15:01:03 +0000339 }
340
David Srbecky71ec1cc2018-05-18 15:57:25 +0100341 ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
David Srbecky6de88332018-06-03 12:00:11 +0100342 return (index == StackMap::kNoValue)
343 ? DexRegisterLocation::None()
David Srbeckycf7833e2018-06-14 16:45:22 +0100344 : dex_register_catalog_.GetRow(index).GetLocation();
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100345 }
346
David Srbecky93bd3612018-07-02 19:30:18 +0100347 bool HasInlineInfo() const {
348 return inline_infos_.NumRows() > 0;
349 }
350
David Srbecky052f8ca2018-04-26 15:42:54 +0100351 uint32_t GetNumberOfStackMaps() const {
352 return stack_maps_.NumRows();
Nicolas Geoffray6530baf2015-05-26 15:22:58 +0100353 }
354
David Srbecky8cd54542018-07-15 23:58:44 +0100355 uint32_t GetMethodIndexOf(InlineInfo inline_info) const {
356 return method_infos_.GetRow(inline_info.GetMethodInfoIndex()).GetMethodIndex();
357 }
358
David Srbeckyfd89b072018-06-03 12:00:22 +0100359 ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
David Srbecky6de88332018-06-03 12:00:11 +0100360 if (stack_map.HasDexRegisterMap()) {
361 DexRegisterMap map(number_of_dex_registers_, DexRegisterLocation::Invalid());
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700362 DecodeDexRegisterMap(stack_map.Row(), /* first_dex_register= */ 0, &map);
David Srbecky6de88332018-06-03 12:00:11 +0100363 return map;
364 }
365 return DexRegisterMap(0, DexRegisterLocation::None());
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100366 }
367
David Srbecky93bd3612018-07-02 19:30:18 +0100368 ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map,
369 InlineInfo inline_info) const {
David Srbecky6de88332018-06-03 12:00:11 +0100370 if (stack_map.HasDexRegisterMap()) {
David Srbecky93bd3612018-07-02 19:30:18 +0100371 DCHECK(stack_map.HasInlineInfoIndex());
372 uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex();
David Srbecky6de88332018-06-03 12:00:11 +0100373 // The register counts are commutative and include all outer levels.
374 // This allows us to determine the range [first, last) in just two lookups.
375 // If we are at depth 0 (the first inlinee), the count from the main method is used.
David Srbecky93bd3612018-07-02 19:30:18 +0100376 uint32_t first = (depth == 0)
377 ? number_of_dex_registers_
378 : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters();
379 uint32_t last = inline_info.GetNumberOfDexRegisters();
David Srbecky6de88332018-06-03 12:00:11 +0100380 DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
381 DecodeDexRegisterMap(stack_map.Row(), first, &map);
382 return map;
383 }
384 return DexRegisterMap(0, DexRegisterLocation::None());
Nicolas Geoffrayb1d0f3f2015-05-14 12:41:51 +0100385 }
386
David Srbecky93bd3612018-07-02 19:30:18 +0100387 BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
David Srbecky052f8ca2018-04-26 15:42:54 +0100388 uint32_t index = stack_map.GetInlineInfoIndex();
David Srbecky6e69e522018-06-03 12:00:14 +0100389 if (index != StackMap::kNoValue) {
David Srbecky93bd3612018-07-02 19:30:18 +0100390 auto begin = inline_infos_.begin() + index;
391 auto end = begin;
392 while ((*end++).GetIsLast() == InlineInfo::kMore) { }
393 return BitTableRange<InlineInfo>(begin, end);
394 } else {
395 return BitTableRange<InlineInfo>();
David Srbecky6e69e522018-06-03 12:00:14 +0100396 }
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100397 }
398
David Srbecky052f8ca2018-04-26 15:42:54 +0100399 StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
David Srbecky93bd3612018-07-02 19:30:18 +0100400 for (StackMap stack_map : stack_maps_) {
David Srbecky50fac062018-06-13 18:55:35 +0100401 if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100402 return stack_map;
403 }
404 }
David Srbeckya45a85c2018-06-21 16:03:12 +0100405 return stack_maps_.GetInvalidRow();
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100406 }
407
David Srbecky50fac062018-06-13 18:55:35 +0100408 // Searches the stack map list backwards because catch stack maps are stored at the end.
David Srbecky052f8ca2018-04-26 15:42:54 +0100409 StackMap GetCatchStackMapForDexPc(uint32_t dex_pc) const {
410 for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
411 StackMap stack_map = GetStackMapAt(i - 1);
David Srbecky50fac062018-06-13 18:55:35 +0100412 if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::Catch) {
David Brazdil77a48ae2015-09-15 12:34:04 +0000413 return stack_map;
414 }
415 }
David Srbeckya45a85c2018-06-21 16:03:12 +0100416 return stack_maps_.GetInvalidRow();
David Brazdil77a48ae2015-09-15 12:34:04 +0000417 }
418
David Srbecky052f8ca2018-04-26 15:42:54 +0100419 StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
David Srbecky93bd3612018-07-02 19:30:18 +0100420 for (StackMap stack_map : stack_maps_) {
David Srbecky50fac062018-06-13 18:55:35 +0100421 if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
422 return stack_map;
Nicolas Geoffrayb331feb2016-02-05 16:51:53 +0000423 }
424 }
David Srbeckya45a85c2018-06-21 16:03:12 +0100425 return stack_maps_.GetInvalidRow();
Nicolas Geoffrayb331feb2016-02-05 16:51:53 +0000426 }
427
David Srbecky0b4e5a32018-06-11 16:25:29 +0100428 StackMap GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa = kRuntimeISA) const;
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100429
David Srbecky71ec1cc2018-05-18 15:57:25 +0100430 // Dump this CodeInfo object on `vios`.
431 // `code_offset` is the (absolute) native PC of the compiled method.
Vladimir Marko8f1e08a2015-06-26 12:06:30 +0100432 void Dump(VariableIndentationOutputStream* vios,
Roland Levillainf2650d12015-05-28 14:53:28 +0100433 uint32_t code_offset,
David Srbecky71ec1cc2018-05-18 15:57:25 +0100434 bool verbose,
David Srbecky8cd54542018-07-15 23:58:44 +0100435 InstructionSet instruction_set) const;
Nicolas Geoffray004c2302015-03-20 10:06:38 +0000436
David Srbecky86decb62018-06-05 06:41:10 +0100437 // Accumulate code info size statistics into the given Stats tree.
David Srbecky42deda82018-08-10 11:23:27 +0100438 static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats* parent);
David Srbecky86decb62018-06-05 06:41:10 +0100439
David Srbeckye42a4b92019-05-26 00:10:25 +0100440 ALWAYS_INLINE static bool HasInlineInfo(const uint8_t* code_info_data) {
441 return (*code_info_data & kHasInlineInfo) != 0;
442 }
443
444 ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* code_info_data) {
445 BitMemoryReader reader(code_info_data);
David Srbecky6c4ec5c2019-06-20 07:23:19 +0000446 std::array<uint32_t, kNumHeaders> header = reader.ReadInterleavedVarints<kNumHeaders>();
David Srbeckye42a4b92019-05-26 00:10:25 +0100447 return QuickMethodFrameInfo(header[1] * kStackAlignment, header[2], header[3]);
David Srbeckyf6ba5b32018-06-23 22:05:49 +0100448 }
449
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100450 private:
David Srbecky0b4e5a32018-06-11 16:25:29 +0100451 // Returns lower bound (fist stack map which has pc greater or equal than the desired one).
452 // It ignores catch stack maps at the end (it is the same as if they had maximum pc value).
David Srbecky697c47a2019-06-16 21:53:07 +0100453 ALWAYS_INLINE BitTable<StackMap>::const_iterator BinarySearchNativePc(uint32_t packed_pc) const;
David Srbecky0b4e5a32018-06-11 16:25:29 +0100454
David Srbecky6de88332018-06-03 12:00:11 +0100455 // Scan backward to determine dex register locations at given stack map.
456 void DecodeDexRegisterMap(uint32_t stack_map_index,
457 uint32_t first_dex_register,
458 /*out*/ DexRegisterMap* map) const;
Roland Levillaina2d8ec62015-03-12 15:25:29 +0000459
David Srbecky6ee06e92018-07-25 21:45:54 +0100460 void Decode(const uint8_t* data, DecodeFlags flags);
David Srbecky052f8ca2018-04-26 15:42:54 +0100461
David Srbecky697c47a2019-06-16 21:53:07 +0100462 // Invokes the callback with index and member pointer of each header field.
David Srbecky42deda82018-08-10 11:23:27 +0100463 template<typename Callback>
464 ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
David Srbecky697c47a2019-06-16 21:53:07 +0100465 size_t index = 0;
466 callback(index++, &CodeInfo::flags_);
467 callback(index++, &CodeInfo::packed_frame_size_);
468 callback(index++, &CodeInfo::core_spill_mask_);
469 callback(index++, &CodeInfo::fp_spill_mask_);
470 callback(index++, &CodeInfo::number_of_dex_registers_);
471 callback(index++, &CodeInfo::bit_table_flags_);
472 DCHECK_EQ(index, kNumHeaders);
David Srbecky42deda82018-08-10 11:23:27 +0100473 }
474
David Srbecky697c47a2019-06-16 21:53:07 +0100475 // Invokes the callback with index and member pointer of each BitTable field.
David Srbecky42deda82018-08-10 11:23:27 +0100476 template<typename Callback>
477 ALWAYS_INLINE static void ForEachBitTableField(Callback callback, DecodeFlags flags = AllTables) {
David Srbecky697c47a2019-06-16 21:53:07 +0100478 size_t index = 0;
479 callback(index++, &CodeInfo::stack_maps_);
480 callback(index++, &CodeInfo::register_masks_);
481 callback(index++, &CodeInfo::stack_masks_);
David Srbecky42deda82018-08-10 11:23:27 +0100482 if (flags & DecodeFlags::GcMasksOnly) {
483 return;
484 }
David Srbecky697c47a2019-06-16 21:53:07 +0100485 callback(index++, &CodeInfo::inline_infos_);
486 callback(index++, &CodeInfo::method_infos_);
David Srbecky42deda82018-08-10 11:23:27 +0100487 if (flags & DecodeFlags::InlineInfoOnly) {
488 return;
489 }
David Srbecky697c47a2019-06-16 21:53:07 +0100490 callback(index++, &CodeInfo::dex_register_masks_);
491 callback(index++, &CodeInfo::dex_register_maps_);
492 callback(index++, &CodeInfo::dex_register_catalog_);
493 DCHECK_EQ(index, kNumBitTables);
David Srbecky42deda82018-08-10 11:23:27 +0100494 }
495
David Srbecky697c47a2019-06-16 21:53:07 +0100496 bool HasBitTable(size_t i) { return ((bit_table_flags_ >> i) & 1) != 0; }
497 bool IsBitTableDeduped(size_t i) { return ((bit_table_flags_ >> (kNumBitTables + i)) & 1) != 0; }
498 void SetBitTableDeduped(size_t i) { bit_table_flags_ |= 1 << (kNumBitTables + i); }
499
David Srbeckye42a4b92019-05-26 00:10:25 +0100500 enum Flags {
501 kHasInlineInfo = 1 << 0,
502 };
503
David Srbecky697c47a2019-06-16 21:53:07 +0100504 // The CodeInfo starts with sequence of variable-length bit-encoded integers.
505 static constexpr size_t kNumHeaders = 6;
David Srbeckye42a4b92019-05-26 00:10:25 +0100506 uint32_t flags_ = 0;
David Srbecky2259f1c2019-01-16 23:18:30 +0000507 uint32_t packed_frame_size_ = 0; // Frame size in kStackAlignment units.
508 uint32_t core_spill_mask_ = 0;
509 uint32_t fp_spill_mask_ = 0;
510 uint32_t number_of_dex_registers_ = 0;
David Srbecky697c47a2019-06-16 21:53:07 +0100511 uint32_t bit_table_flags_ = 0;
512
513 // The encoded bit-tables follow the header. Based on the above flags field,
514 // bit-tables might be omitted or replaced by relative bit-offset if deduped.
515 static constexpr size_t kNumBitTables = 8;
David Srbeckycf7833e2018-06-14 16:45:22 +0100516 BitTable<StackMap> stack_maps_;
517 BitTable<RegisterMask> register_masks_;
David Srbecky42deda82018-08-10 11:23:27 +0100518 BitTable<StackMask> stack_masks_;
David Srbeckya2d29a32018-08-03 11:06:38 +0100519 BitTable<InlineInfo> inline_infos_;
520 BitTable<MethodInfo> method_infos_;
David Srbecky42deda82018-08-10 11:23:27 +0100521 BitTable<DexRegisterMask> dex_register_masks_;
David Srbeckycf7833e2018-06-14 16:45:22 +0100522 BitTable<DexRegisterMapInfo> dex_register_maps_;
523 BitTable<DexRegisterInfo> dex_register_catalog_;
David Srbecky697c47a2019-06-16 21:53:07 +0100524
David Srbecky6ee06e92018-07-25 21:45:54 +0100525 uint32_t size_in_bits_ = 0;
David Srbecky67ba8722019-05-23 15:32:18 +0100526
527 friend class StackMapStream;
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100528};
529
Roland Levillain1c1da432015-07-16 11:54:44 +0100530#undef ELEMENT_BYTE_OFFSET_AFTER
531#undef ELEMENT_BIT_OFFSET_AFTER
532
Nicolas Geoffray99ea58c2014-07-02 15:08:17 +0100533} // namespace art
534
535#endif // ART_RUNTIME_STACK_MAP_H_