Compress the StackMaps.
This is the first step towards compressing the StackMap (not
the DexRegisterMap). The next step will be to encode only the
bytes actually needed per entry, instead of always widening
each field from a byte to a word.
Change-Id: I4f81b2d05bf5cc508585e16fbbed1bafbc850e2e
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 6588288..5548d94 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -151,6 +151,7 @@
runtime_options.cc \
signal_catcher.cc \
stack.cc \
+ stack_map.cc \
thread.cc \
thread_list.cc \
thread_pool.cc \
diff --git a/runtime/check_reference_map_visitor.h b/runtime/check_reference_map_visitor.h
index 0ec0295..204546d 100644
--- a/runtime/check_reference_map_visitor.h
+++ b/runtime/check_reference_map_visitor.h
@@ -69,8 +69,8 @@
uint16_t number_of_dex_registers = m->GetCodeItem()->registers_size_;
DexRegisterMap dex_register_map =
code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
- MemoryRegion stack_mask = stack_map.GetStackMask();
- uint32_t register_mask = stack_map.GetRegisterMask();
+ MemoryRegion stack_mask = stack_map.GetStackMask(code_info);
+ uint32_t register_mask = stack_map.GetRegisterMask(code_info);
for (int i = 0; i < number_of_references; ++i) {
int reg = registers[i];
CHECK(reg < m->GetCodeItem()->registers_size_);
diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h
index c27c6e9..0ccf5db 100644
--- a/runtime/mirror/art_method-inl.h
+++ b/runtime/mirror/art_method-inl.h
@@ -230,10 +230,6 @@
return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
}
-inline StackMap ArtMethod::GetStackMap(uint32_t native_pc_offset) {
- return GetOptimizedCodeInfo().GetStackMapForNativePcOffset(native_pc_offset);
-}
-
inline CodeInfo ArtMethod::GetOptimizedCodeInfo() {
DCHECK(IsOptimized(sizeof(void*)));
const void* code_pointer = GetQuickOatCodePointer(sizeof(void*));
diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc
index bc58709..ffee59e 100644
--- a/runtime/mirror/art_method.cc
+++ b/runtime/mirror/art_method.cc
@@ -202,8 +202,8 @@
const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
if (IsOptimized(sizeof(void*))) {
- uint32_t ret = GetStackMap(sought_offset).GetDexPc();
- return ret;
+ CodeInfo code_info = GetOptimizedCodeInfo();
+ return code_info.GetStackMapForNativePcOffset(sought_offset).GetDexPc(code_info);
}
MappingTable table(entry_point != nullptr ?
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
index d878f25..0eb1b91 100644
--- a/runtime/mirror/art_method.h
+++ b/runtime/mirror/art_method.h
@@ -348,7 +348,6 @@
const uint8_t* GetVmapTable(const void* code_pointer, size_t pointer_size)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
- StackMap GetStackMap(uint32_t native_pc_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
CodeInfo GetOptimizedCodeInfo() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Callers should wrap the uint8_t* in a GcMap instance for convenient access.
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
new file mode 100644
index 0000000..020a6e6
--- /dev/null
+++ b/runtime/stack_map.cc
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "stack_map.h"
+
+namespace art {
+
+constexpr uint32_t StackMap::kNoDexRegisterMapSmallEncoding;
+constexpr uint32_t StackMap::kNoInlineInfoSmallEncoding;
+constexpr uint32_t StackMap::kNoDexRegisterMap;
+constexpr uint32_t StackMap::kNoInlineInfo;
+
+uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
+ return info.HasSmallDexPc()
+ ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset())
+ : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset());
+}
+
+void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
+ DCHECK(!info.HasSmallDexPc() || IsUint<kBitsForSmallEncoding>(dex_pc)) << dex_pc;
+ info.HasSmallDexPc()
+ ? region_.StoreUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc)
+ : region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc);
+}
+
+uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
+ return info.HasSmallNativePc()
+ ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapNativePcOffset())
+ : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapNativePcOffset());
+}
+
+void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
+ DCHECK(!info.HasSmallNativePc()
+ || IsUint<kBitsForSmallEncoding>(native_pc_offset)) << native_pc_offset;
+ uint32_t entry = info.ComputeStackMapNativePcOffset();
+ info.HasSmallNativePc()
+ ? region_.StoreUnaligned<kSmallEncoding>(entry, native_pc_offset)
+ : region_.StoreUnaligned<kLargeEncoding>(entry, native_pc_offset);
+}
+
+uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
+ if (info.HasSmallDexRegisterMap()) {
+ uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
+ info.ComputeStackMapDexRegisterMapOffset());
+ if (value == kNoDexRegisterMapSmallEncoding) {
+ return kNoDexRegisterMap;
+ } else {
+ return value;
+ }
+ } else {
+ return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexRegisterMapOffset());
+ }
+}
+
+void StackMap::SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset) {
+ DCHECK(!info.HasSmallDexRegisterMap()
+ || (IsUint<kBitsForSmallEncoding>(offset)
+ || (offset == kNoDexRegisterMap))) << offset;
+ size_t dex_register_map_entry = info.ComputeStackMapDexRegisterMapOffset();
+ info.HasSmallDexRegisterMap()
+ ? region_.StoreUnaligned<kSmallEncoding>(dex_register_map_entry, offset)
+ : region_.StoreUnaligned<kLargeEncoding>(dex_register_map_entry, offset);
+}
+
+uint32_t StackMap::GetInlineDescriptorOffset(const CodeInfo& info) const {
+ if (!info.HasInlineInfo()) return kNoInlineInfo;
+ if (info.HasSmallInlineInfo()) {
+ uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
+ info.ComputeStackMapInlineInfoOffset());
+ if (value == kNoInlineInfoSmallEncoding) {
+ return kNoInlineInfo;
+ } else {
+ return value;
+ }
+ } else {
+ return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapInlineInfoOffset());
+ }
+}
+
+void StackMap::SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset) {
+ DCHECK(info.HasInlineInfo());
+ DCHECK(!info.HasSmallInlineInfo()
+ || (IsUint<kBitsForSmallEncoding>(offset)
+ || (offset == kNoInlineInfo))) << offset;
+ size_t inline_entry = info.ComputeStackMapInlineInfoOffset();
+ info.HasSmallInlineInfo()
+ ? region_.StoreUnaligned<kSmallEncoding>(inline_entry, offset)
+ : region_.StoreUnaligned<kLargeEncoding>(inline_entry, offset);
+}
+
+uint32_t StackMap::GetRegisterMask(const CodeInfo& info) const {
+ return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset());
+}
+
+void StackMap::SetRegisterMask(const CodeInfo& info, uint32_t mask) {
+ region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset(), mask);
+}
+
+size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
+ bool has_inline_info,
+ bool is_small_inline_info,
+ bool is_small_dex_map,
+ bool is_small_dex_pc,
+ bool is_small_native_pc) {
+ return StackMap::kFixedSize
+ + stack_mask_size
+ + (has_inline_info ? NumberOfBytesForEntry(is_small_inline_info) : 0)
+ + NumberOfBytesForEntry(is_small_dex_map)
+ + NumberOfBytesForEntry(is_small_dex_pc)
+ + NumberOfBytesForEntry(is_small_native_pc);
+}
+
+size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
+ size_t inline_info_size,
+ size_t dex_register_map_size,
+ size_t dex_pc_max,
+ size_t native_pc_max) {
+ return ComputeStackMapSize(
+ stack_mask_size,
+ inline_info_size != 0,
+ // + 1 to also encode kNoInlineInfo.
+ IsUint<kBitsForSmallEncoding>(inline_info_size + dex_register_map_size + 1),
+ // + 1 to also encode kNoDexRegisterMap.
+ IsUint<kBitsForSmallEncoding>(dex_register_map_size + 1),
+ IsUint<kBitsForSmallEncoding>(dex_pc_max),
+ IsUint<kBitsForSmallEncoding>(native_pc_max));
+}
+
+MemoryRegion StackMap::GetStackMask(const CodeInfo& info) const {
+ return region_.Subregion(info.ComputeStackMapStackMaskOffset(), info.GetStackMaskSize());
+}
+
+void CodeInfo::DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const {
+ StackMap stack_map = GetStackMapAt(stack_map_num);
+ os << " StackMap " << stack_map_num
+ << std::hex
+ << " (dex_pc=0x" << stack_map.GetDexPc(*this)
+ << ", native_pc_offset=0x" << stack_map.GetNativePcOffset(*this)
+ << ", dex_register_map_offset=0x" << stack_map.GetDexRegisterMapOffset(*this)
+ << ", inline_info_offset=0x" << stack_map.GetInlineDescriptorOffset(*this)
+ << ", register_mask=0x" << stack_map.GetRegisterMask(*this)
+ << std::dec
+ << ", stack_mask=0b";
+ MemoryRegion stack_mask = stack_map.GetStackMask(*this);
+ for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
+ os << stack_mask.LoadBit(e - i - 1);
+ }
+ os << ")\n";
+};
+
+void CodeInfo::Dump(std::ostream& os, uint16_t number_of_dex_registers) const {
+ uint32_t code_info_size = GetOverallSize();
+ size_t number_of_stack_maps = GetNumberOfStackMaps();
+ os << " Optimized CodeInfo (size=" << code_info_size
+ << ", number_of_dex_registers=" << number_of_dex_registers
+ << ", number_of_stack_maps=" << number_of_stack_maps
+ << ", has_inline_info=" << HasInlineInfo()
+ << ", has_small_inline_info=" << HasSmallInlineInfo()
+ << ", has_small_dex_register_map=" << HasSmallDexRegisterMap()
+ << ", has_small_dex_pc=" << HasSmallDexPc()
+ << ", has_small_native_pc=" << HasSmallNativePc()
+ << ")\n";
+
+ // Display stack maps along with Dex register maps.
+ for (size_t i = 0; i < number_of_stack_maps; ++i) {
+ StackMap stack_map = GetStackMapAt(i);
+ DumpStackMapHeader(os, i);
+ if (stack_map.HasDexRegisterMap(*this)) {
+ DexRegisterMap dex_register_map = GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+ // TODO: Display the bit mask of live Dex registers.
+ for (size_t j = 0; j < number_of_dex_registers; ++j) {
+ if (dex_register_map.IsDexRegisterLive(j)) {
+ DexRegisterLocation location =
+ dex_register_map.GetLocationKindAndValue(j, number_of_dex_registers);
+ os << " " << "v" << j << ": "
+ << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind())
+ << " (" << location.GetValue() << ")" << '\n';
+ }
+ }
+ }
+ }
+ // TODO: Dump the stack map's inline information.
+}
+
+} // namespace art
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 961772c..629fc9a 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -34,6 +34,8 @@
// Size of Dex virtual registers.
static size_t constexpr kVRegSize = 4;
+class CodeInfo;
+
/**
* Classes in the following file are wrapper on stack map information backed
* by a MemoryRegion. As such they read and write to the region, they don't have
@@ -515,63 +517,41 @@
public:
explicit StackMap(MemoryRegion region) : region_(region) {}
- uint32_t GetDexPc() const {
- return region_.LoadUnaligned<uint32_t>(kDexPcOffset);
- }
+ uint32_t GetDexPc(const CodeInfo& info) const;
- void SetDexPc(uint32_t dex_pc) {
- region_.StoreUnaligned<uint32_t>(kDexPcOffset, dex_pc);
- }
+ void SetDexPc(const CodeInfo& info, uint32_t dex_pc);
- uint32_t GetNativePcOffset() const {
- return region_.LoadUnaligned<uint32_t>(kNativePcOffsetOffset);
- }
+ uint32_t GetNativePcOffset(const CodeInfo& info) const;
- void SetNativePcOffset(uint32_t native_pc_offset) {
- region_.StoreUnaligned<uint32_t>(kNativePcOffsetOffset, native_pc_offset);
- }
+ void SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset);
- uint32_t GetDexRegisterMapOffset() const {
- return region_.LoadUnaligned<uint32_t>(kDexRegisterMapOffsetOffset);
- }
+ uint32_t GetDexRegisterMapOffset(const CodeInfo& info) const;
- void SetDexRegisterMapOffset(uint32_t offset) {
- region_.StoreUnaligned<uint32_t>(kDexRegisterMapOffsetOffset, offset);
- }
+ void SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset);
- uint32_t GetInlineDescriptorOffset() const {
- return region_.LoadUnaligned<uint32_t>(kInlineDescriptorOffsetOffset);
- }
+ uint32_t GetInlineDescriptorOffset(const CodeInfo& info) const;
- void SetInlineDescriptorOffset(uint32_t offset) {
- region_.StoreUnaligned<uint32_t>(kInlineDescriptorOffsetOffset, offset);
- }
+ void SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset);
- uint32_t GetRegisterMask() const {
- return region_.LoadUnaligned<uint32_t>(kRegisterMaskOffset);
- }
+ uint32_t GetRegisterMask(const CodeInfo& info) const;
- void SetRegisterMask(uint32_t mask) {
- region_.StoreUnaligned<uint32_t>(kRegisterMaskOffset, mask);
- }
+ void SetRegisterMask(const CodeInfo& info, uint32_t mask);
- MemoryRegion GetStackMask() const {
- return region_.Subregion(kStackMaskOffset, StackMaskSize());
- }
+ MemoryRegion GetStackMask(const CodeInfo& info) const;
- void SetStackMask(const BitVector& sp_map) {
- MemoryRegion region = GetStackMask();
+ void SetStackMask(const CodeInfo& info, const BitVector& sp_map) {
+ MemoryRegion region = GetStackMask(info);
for (size_t i = 0; i < region.size_in_bits(); i++) {
region.StoreBit(i, sp_map.IsBitSet(i));
}
}
- bool HasDexRegisterMap() const {
- return GetDexRegisterMapOffset() != kNoDexRegisterMap;
+ bool HasDexRegisterMap(const CodeInfo& info) const {
+ return GetDexRegisterMapOffset(info) != kNoDexRegisterMap;
}
- bool HasInlineInfo() const {
- return GetInlineDescriptorOffset() != kNoInlineInfo;
+ bool HasInlineInfo(const CodeInfo& info) const {
+ return GetInlineDescriptorOffset(info) != kNoInlineInfo;
}
bool Equals(const StackMap& other) const {
@@ -579,32 +559,51 @@
&& region_.size() == other.region_.size();
}
- static size_t ComputeStackMapSize(size_t stack_mask_size) {
- return StackMap::kFixedSize + stack_mask_size;
- }
+ static size_t ComputeStackMapSize(size_t stack_mask_size,
+ bool has_inline_info,
+ bool is_small_inline_info,
+ bool is_small_dex_map,
+ bool is_small_dex_pc,
+ bool is_small_native_pc);
+
+ static size_t ComputeStackMapSize(size_t stack_mask_size,
+ size_t inline_info_size,
+ size_t dex_register_map_size,
+ size_t dex_pc_max,
+ size_t native_pc_max);
+
+ // TODO: Revisit this abstraction if we allow 3 bytes encoding.
+ typedef uint8_t kSmallEncoding;
+ typedef uint32_t kLargeEncoding;
+ static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
+ static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
+ static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
+ static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;
// Special (invalid) offset for the DexRegisterMapOffset field meaning
// that there is no Dex register map for this stack map.
static constexpr uint32_t kNoDexRegisterMap = -1;
+ static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
+ std::numeric_limits<kSmallEncoding>::max();
// Special (invalid) offset for the InlineDescriptorOffset field meaning
// that there is no inline info for this stack map.
static constexpr uint32_t kNoInlineInfo = -1;
+ static constexpr uint32_t kNoInlineInfoSmallEncoding =
+ std::numeric_limits<kSmallEncoding>::max();
+
+ // Returns the number of bytes needed for an entry in the StackMap.
+ static size_t NumberOfBytesForEntry(bool small_encoding) {
+ return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
+ }
private:
// TODO: Instead of plain types such as "uint32_t", introduce
// typedefs (and document the memory layout of StackMap).
- static constexpr int kDexPcOffset = 0;
- static constexpr int kNativePcOffsetOffset = kDexPcOffset + sizeof(uint32_t);
- static constexpr int kDexRegisterMapOffsetOffset = kNativePcOffsetOffset + sizeof(uint32_t);
- static constexpr int kInlineDescriptorOffsetOffset =
- kDexRegisterMapOffsetOffset + sizeof(uint32_t);
- static constexpr int kRegisterMaskOffset = kInlineDescriptorOffsetOffset + sizeof(uint32_t);
+ static constexpr int kRegisterMaskOffset = 0;
static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
static constexpr int kStackMaskOffset = kFixedSize;
- size_t StackMaskSize() const { return region_.size() - kFixedSize; }
-
MemoryRegion region_;
friend class CodeInfo;
@@ -626,6 +625,80 @@
region_ = MemoryRegion(const_cast<void*>(data), size);
}
+ void SetEncoding(size_t inline_info_size,
+ size_t dex_register_map_size,
+ size_t dex_pc_max,
+ size_t native_pc_max) {
+ if (inline_info_size != 0) {
+ region_.StoreBit(kHasInlineInfoBitOffset, 1);
+ region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
+ // + 1 to also encode kNoInlineInfo: if an inline info offset
+ // is at 0xFF, we want to overflow to a larger encoding, because it will
+ // conflict with kNoInlineInfo.
+ // The offset is relative to the dex register map. TODO: Change this.
+ inline_info_size + dex_register_map_size + 1));
+ } else {
+ region_.StoreBit(kHasInlineInfoBitOffset, 0);
+ region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
+ }
+ region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
+ // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
+ // is at 0xFF, we want to overflow to a larger encoding, because it will
+ // conflict with kNoDexRegisterMap.
+ IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
+ region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
+ region_.StoreBit(kHasSmallNativePcBitOffset,
+ IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
+ }
+
+ bool HasInlineInfo() const {
+ return region_.LoadBit(kHasInlineInfoBitOffset);
+ }
+
+ bool HasSmallInlineInfo() const {
+ return region_.LoadBit(kHasSmallInlineInfoBitOffset);
+ }
+
+ bool HasSmallDexRegisterMap() const {
+ return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
+ }
+
+ bool HasSmallNativePc() const {
+ return region_.LoadBit(kHasSmallNativePcBitOffset);
+ }
+
+ bool HasSmallDexPc() const {
+ return region_.LoadBit(kHasSmallDexPcBitOffset);
+ }
+
+ size_t ComputeStackMapRegisterMaskOffset() const {
+ return StackMap::kRegisterMaskOffset;
+ }
+
+ size_t ComputeStackMapStackMaskOffset() const {
+ return StackMap::kStackMaskOffset;
+ }
+
+ size_t ComputeStackMapDexPcOffset() const {
+ return ComputeStackMapStackMaskOffset() + GetStackMaskSize();
+ }
+
+ size_t ComputeStackMapNativePcOffset() const {
+ return ComputeStackMapDexPcOffset()
+ + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
+ }
+
+ size_t ComputeStackMapDexRegisterMapOffset() const {
+ return ComputeStackMapNativePcOffset()
+ + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
+ }
+
+ size_t ComputeStackMapInlineInfoOffset() const {
+ CHECK(HasInlineInfo());
+ return ComputeStackMapDexRegisterMapOffset()
+ + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
+ }
+
StackMap GetStackMapAt(size_t i) const {
size_t size = StackMapSize();
return StackMap(GetStackMaps().Subregion(i * size, size));
@@ -658,7 +731,12 @@
// Get the size of one stack map of this CodeInfo object, in bytes.
// All stack maps of a CodeInfo have the same size.
size_t StackMapSize() const {
- return StackMap::ComputeStackMapSize(GetStackMaskSize());
+ return StackMap::ComputeStackMapSize(GetStackMaskSize(),
+ HasInlineInfo(),
+ HasSmallInlineInfo(),
+ HasSmallDexRegisterMap(),
+ HasSmallDexPc(),
+ HasSmallNativePc());
}
// Get the size all the stack maps of this CodeInfo object, in bytes.
@@ -666,21 +744,25 @@
return StackMapSize() * GetNumberOfStackMaps();
}
+ size_t GetDexRegisterMapsOffset() const {
+ return CodeInfo::kFixedSize + StackMapsSize();
+ }
+
uint32_t GetStackMapsOffset() const {
return kFixedSize;
}
DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
- DCHECK(stack_map.HasDexRegisterMap());
- uint32_t offset = stack_map.GetDexRegisterMapOffset();
+ DCHECK(stack_map.HasDexRegisterMap(*this));
+ uint32_t offset = stack_map.GetDexRegisterMapOffset(*this) + GetDexRegisterMapsOffset();
size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
return DexRegisterMap(region_.Subregion(offset, size));
}
InlineInfo GetInlineInfoOf(StackMap stack_map) const {
- DCHECK(stack_map.HasInlineInfo());
- uint32_t offset = stack_map.GetInlineDescriptorOffset();
- uint8_t depth = region_.Load<uint8_t>(offset);
+ DCHECK(stack_map.HasInlineInfo(*this));
+ uint32_t offset = stack_map.GetInlineDescriptorOffset(*this) + GetDexRegisterMapsOffset();
+ uint8_t depth = region_.LoadUnaligned<uint8_t>(offset);
return InlineInfo(region_.Subregion(offset,
InlineInfo::kFixedSize + depth * InlineInfo::SingleEntrySize()));
}
@@ -688,7 +770,7 @@
StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
StackMap stack_map = GetStackMapAt(i);
- if (stack_map.GetDexPc() == dex_pc) {
+ if (stack_map.GetDexPc(*this) == dex_pc) {
return stack_map;
}
}
@@ -700,7 +782,7 @@
// TODO: stack maps are sorted by native pc, we can do a binary search.
for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
StackMap stack_map = GetStackMapAt(i);
- if (stack_map.GetNativePcOffset() == native_pc_offset) {
+ if (stack_map.GetNativePcOffset(*this) == native_pc_offset) {
return stack_map;
}
}
@@ -708,14 +790,24 @@
UNREACHABLE();
}
+ void Dump(std::ostream& os, uint16_t number_of_dex_registers) const;
+ void DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const;
+
private:
// TODO: Instead of plain types such as "uint32_t", introduce
// typedefs (and document the memory layout of CodeInfo).
static constexpr int kOverallSizeOffset = 0;
- static constexpr int kNumberOfStackMapsOffset = kOverallSizeOffset + sizeof(uint32_t);
+ static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
+ static constexpr int kNumberOfStackMapsOffset = kEncodingInfoOffset + sizeof(uint8_t);
static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);
+ static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
+ static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
+ static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
+ static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
+ static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;
+
MemoryRegion GetStackMaps() const {
return region_.size() == 0
? MemoryRegion()
diff --git a/runtime/thread.cc b/runtime/thread.cc
index affb6cd..e1a07e9 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2155,8 +2155,9 @@
Runtime* runtime = Runtime::Current();
const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(m, sizeof(void*));
uintptr_t native_pc_offset = m->NativeQuickPcOffset(GetCurrentQuickFramePc(), entry_point);
- StackMap map = m->GetStackMap(native_pc_offset);
- MemoryRegion mask = map.GetStackMask();
+ CodeInfo code_info = m->GetOptimizedCodeInfo();
+ StackMap map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
+ MemoryRegion mask = map.GetStackMask(code_info);
// Visit stack entries that hold pointers.
for (size_t i = 0; i < mask.size_in_bits(); ++i) {
if (mask.LoadBit(i)) {
@@ -2173,7 +2174,7 @@
}
}
// Visit callee-save registers that hold pointers.
- uint32_t register_mask = map.GetRegisterMask();
+ uint32_t register_mask = map.GetRegisterMask(code_info);
for (size_t i = 0; i < BitSizeOf<uint32_t>(); ++i) {
if (register_mask & (1 << i)) {
mirror::Object** ref_addr = reinterpret_cast<mirror::Object**>(GetGPRAddress(i));