Add a new stack map scheme that encodes compilation info per pc.

Encodes stack mask, register mask, dex register values, and inlining
info. The encoding is currently very straightforward: there is no
clever encoding, nor compression.

Change-Id: I5fd9ae28189a5513cd9e3c8d52c648463207643d
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index ee51fcd..08b78a6 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -145,6 +145,7 @@
   compiler/optimizing/pretty_printer_test.cc \
   compiler/optimizing/register_allocator_test.cc \
   compiler/optimizing/ssa_test.cc \
+  compiler/optimizing/stack_map_test.cc \
   compiler/output_stream_test.cc \
   compiler/utils/arena_allocator_test.cc \
   compiler/utils/dedupe_set_test.cc \
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
new file mode 100644
index 0000000..3c6ad8f
--- /dev/null
+++ b/compiler/optimizing/stack_map_stream.h
@@ -0,0 +1,210 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
+#define ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
+
+#include "base/bit_vector.h"
+#include "memory_region.h"
+#include "stack_map.h"
+#include "utils/allocation.h"
+#include "utils/growable_array.h"
+
+namespace art {
+
+/**
+ * Collects and builds a CodeInfo for a method.
+ */
+/**
+ * Collects and builds a CodeInfo for a method.
+ */
+template<typename T>
+class StackMapStream : public ValueObject {
+ public:
+  explicit StackMapStream(ArenaAllocator* allocator)
+      : stack_maps_(allocator, 10),
+        dex_register_maps_(allocator, 10 * 4),
+        inline_infos_(allocator, 2),
+        stack_mask_max_(-1),
+        // Must be zero-initialized: it is incremented in AddStackMapEntry
+        // and read in ComputeInlineInfoSize.
+        number_of_stack_maps_with_inline_info_(0) {}
+
+  // Compute bytes needed to encode a mask with the given maximum element.
+  static uint32_t StackMaskEncodingSize(int max_element) {
+    int number_of_bits = max_element + 1;  // Need room for max element too.
+    return RoundUp(number_of_bits, kBitsPerByte) / kBitsPerByte;
+  }
+
+  // See runtime/stack_map.h to know what these fields contain.
+  struct StackMapEntry {
+    uint32_t dex_pc;
+    T native_pc;
+    uint32_t register_mask;
+    BitVector* sp_mask;
+    uint32_t num_dex_registers;
+    uint8_t inlining_depth;
+    size_t dex_register_maps_start_index;
+    size_t inline_infos_start_index;
+  };
+
+  struct DexRegisterEntry {
+    DexRegisterMap::LocationKind kind;
+    int32_t value;
+  };
+
+  struct InlineInfoEntry {
+    uint32_t method_index;
+  };
+
+  // Record a stack map for the given pc. Dex register and inline info
+  // entries added afterwards (until the next stack map entry) belong to
+  // this stack map.
+  void AddStackMapEntry(uint32_t dex_pc,
+                        T native_pc,
+                        uint32_t register_mask,
+                        BitVector* sp_mask,
+                        uint32_t num_dex_registers,
+                        uint8_t inlining_depth) {
+    StackMapEntry entry;
+    entry.dex_pc = dex_pc;
+    entry.native_pc = native_pc;
+    entry.register_mask = register_mask;
+    entry.sp_mask = sp_mask;
+    entry.num_dex_registers = num_dex_registers;
+    entry.inlining_depth = inlining_depth;
+    entry.dex_register_maps_start_index = dex_register_maps_.Size();
+    entry.inline_infos_start_index = inline_infos_.Size();
+    stack_maps_.Add(entry);
+
+    stack_mask_max_ = std::max(stack_mask_max_, sp_mask->GetHighestBitSet());
+    if (inlining_depth > 0) {
+      number_of_stack_maps_with_inline_info_++;
+    }
+  }
+
+  void AddDexRegisterEntry(DexRegisterMap::LocationKind kind, int32_t value) {
+    DexRegisterEntry entry;
+    entry.kind = kind;
+    entry.value = value;
+    dex_register_maps_.Add(entry);
+  }
+
+  void AddInlineInfoEntry(uint32_t method_index) {
+    InlineInfoEntry entry;
+    entry.method_index = method_index;
+    inline_infos_.Add(entry);
+  }
+
+  // Total number of bytes FillIn will write.
+  size_t ComputeNeededSize() const {
+    return CodeInfo<T>::kFixedSize
+        + ComputeStackMapSize()
+        + ComputeDexRegisterMapSize()
+        + ComputeInlineInfoSize();
+  }
+
+  size_t ComputeStackMapSize() const {
+    return stack_maps_.Size() * (StackMap<T>::kFixedSize + StackMaskEncodingSize(stack_mask_max_));
+  }
+
+  size_t ComputeDexRegisterMapSize() const {
+    // We currently encode all dex register information per stack map.
+    return stack_maps_.Size() * DexRegisterMap::kFixedSize
+      // For each dex register entry.
+      + (dex_register_maps_.Size() * DexRegisterMap::SingleEntrySize());
+  }
+
+  size_t ComputeInlineInfoSize() const {
+    return inline_infos_.Size() * InlineInfo::SingleEntrySize()
+      // For encoding the depth.
+      + (number_of_stack_maps_with_inline_info_ * InlineInfo::kFixedSize);
+  }
+
+  size_t ComputeInlineInfoStart() const {
+    return ComputeDexRegisterMapStart() + ComputeDexRegisterMapSize();
+  }
+
+  size_t ComputeDexRegisterMapStart() const {
+    return CodeInfo<T>::kFixedSize + ComputeStackMapSize();
+  }
+
+  // Encode all collected entries into `region`, which must be at least
+  // ComputeNeededSize() bytes long.
+  void FillIn(MemoryRegion region) {
+    CodeInfo<T> code_info(region);
+
+    size_t stack_mask_size = StackMaskEncodingSize(stack_mask_max_);
+    uint8_t* memory_start = region.start();
+
+    MemoryRegion dex_register_maps_region = region.Subregion(
+      ComputeDexRegisterMapStart(),
+      ComputeDexRegisterMapSize());
+
+    MemoryRegion inline_infos_region = region.Subregion(
+      ComputeInlineInfoStart(),
+      ComputeInlineInfoSize());
+
+    code_info.SetNumberOfStackMaps(stack_maps_.Size());
+    code_info.SetStackMaskSize(stack_mask_size);
+
+    uintptr_t next_dex_register_map_offset = 0;
+    uintptr_t next_inline_info_offset = 0;
+    for (size_t i = 0, e = stack_maps_.Size(); i < e; ++i) {
+      StackMap<T> stack_map = code_info.GetStackMapAt(i);
+      StackMapEntry entry = stack_maps_.Get(i);
+
+      stack_map.SetDexPc(entry.dex_pc);
+      stack_map.SetNativePc(entry.native_pc);
+      stack_map.SetRegisterMask(entry.register_mask);
+      stack_map.SetStackMask(*entry.sp_mask);
+
+      // Set the register map. Offsets are recorded relative to the start of
+      // the whole CodeInfo region (`memory_start`).
+      MemoryRegion register_region = dex_register_maps_region.Subregion(
+          next_dex_register_map_offset,
+          DexRegisterMap::kFixedSize + entry.num_dex_registers * DexRegisterMap::SingleEntrySize());
+      next_dex_register_map_offset += register_region.size();
+      DexRegisterMap dex_register_map(register_region);
+      stack_map.SetDexRegisterMapOffset(register_region.start() - memory_start);
+
+      for (size_t j = 0; j < entry.num_dex_registers; ++j) {
+        DexRegisterEntry register_entry =
+            dex_register_maps_.Get(j + entry.dex_register_maps_start_index);
+        dex_register_map.SetRegisterInfo(j, register_entry.kind, register_entry.value);
+      }
+
+      // Set the inlining info.
+      if (entry.inlining_depth != 0) {
+        MemoryRegion inline_region = inline_infos_region.Subregion(
+            next_inline_info_offset,
+            InlineInfo::kFixedSize + entry.inlining_depth * InlineInfo::SingleEntrySize());
+        next_inline_info_offset += inline_region.size();
+        InlineInfo inline_info(inline_region);
+
+        stack_map.SetInlineDescriptorOffset(inline_region.start() - memory_start);
+
+        inline_info.SetDepth(entry.inlining_depth);
+        for (size_t j = 0; j < entry.inlining_depth; ++j) {
+          InlineInfoEntry inline_entry = inline_infos_.Get(j + entry.inline_infos_start_index);
+          inline_info.SetMethodReferenceIndexAtDepth(j, inline_entry.method_index);
+        }
+      } else {
+        stack_map.SetInlineDescriptorOffset(InlineInfo::kNoInlineInfo);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<StackMapEntry> stack_maps_;
+  GrowableArray<DexRegisterEntry> dex_register_maps_;
+  GrowableArray<InlineInfoEntry> inline_infos_;
+  int stack_mask_max_;
+  size_t number_of_stack_maps_with_inline_info_;
+
+  DISALLOW_COPY_AND_ASSIGN(StackMapStream);
+};
+
+}  // namespace art
+
+#endif  // ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
new file mode 100644
index 0000000..a70259e
--- /dev/null
+++ b/compiler/optimizing/stack_map_test.cc
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "stack_map.h"
+#include "stack_map_stream.h"
+#include "utils/arena_bit_vector.h"
+
+#include "gtest/gtest.h"
+
+namespace art {
+
+// Returns whether the memory region and the bit vector hold the same bits,
+// over the length of the region.
+bool SameBits(MemoryRegion region, const BitVector& bit_vector) {
+  size_t number_of_bits = region.size_in_bits();
+  for (size_t bit = 0; bit < number_of_bits; ++bit) {
+    if (bit_vector.IsBitSet(bit) != region.LoadBit(bit)) {
+      return false;
+    }
+  }
+  return true;
+}
+
+TEST(StackMapTest, Test1) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  StackMapStream<size_t> stream(&arena);
+
+  // One stack map at dex pc 0 / native pc 64, register mask 0x3,
+  // two dex registers, no inlining, empty stack mask.
+  ArenaBitVector sp_mask(&arena, 0, false);
+  stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, 2, 0);
+  stream.AddDexRegisterEntry(DexRegisterMap::kInStack, 0);
+  stream.AddDexRegisterEntry(DexRegisterMap::kConstant, -2);
+
+  size_t size = stream.ComputeNeededSize();
+  void* memory = arena.Alloc(size, kArenaAllocMisc);
+  MemoryRegion region(memory, size);
+  stream.FillIn(region);
+
+  CodeInfo<size_t> code_info(region);
+  // No bit was set in sp_mask, so the encoded stack mask is empty.
+  ASSERT_EQ(0u, code_info.GetStackMaskSize());
+  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
+
+  StackMap<size_t> stack_map = code_info.GetStackMapAt(0);
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePc(64)));
+  ASSERT_EQ(0u, stack_map.GetDexPc());
+  ASSERT_EQ(64u, stack_map.GetNativePc());
+  ASSERT_EQ(0x3u, stack_map.GetRegisterMask());
+  ASSERT_FALSE(stack_map.HasInlineInfo());
+
+  MemoryRegion stack_mask = stack_map.GetStackMask();
+  ASSERT_TRUE(SameBits(stack_mask, sp_mask));
+
+  // The two dex register entries added above, in order.
+  DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, 2);
+  ASSERT_EQ(DexRegisterMap::kInStack, dex_registers.GetLocationKind(0));
+  ASSERT_EQ(DexRegisterMap::kConstant, dex_registers.GetLocationKind(1));
+  ASSERT_EQ(0, dex_registers.GetValue(0));
+  ASSERT_EQ(-2, dex_registers.GetValue(1));
+}
+
+TEST(StackMapTest, Test2) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  StackMapStream<size_t> stream(&arena);
+
+  // First stack map: two dex registers and inlining depth 2.
+  ArenaBitVector sp_mask1(&arena, 0, true);
+  sp_mask1.SetBit(2);
+  sp_mask1.SetBit(4);
+  stream.AddStackMapEntry(0, 64, 0x3, &sp_mask1, 2, 2);
+  stream.AddDexRegisterEntry(DexRegisterMap::kInStack, 0);
+  stream.AddDexRegisterEntry(DexRegisterMap::kConstant, -2);
+  stream.AddInlineInfoEntry(42);
+  stream.AddInlineInfoEntry(82);
+
+  // Second stack map: one dex register, no inlining. Bit 8 belongs to the
+  // second map's mask (was a copy-paste typo setting it on sp_mask1).
+  ArenaBitVector sp_mask2(&arena, 0, true);
+  sp_mask2.SetBit(3);
+  sp_mask2.SetBit(8);
+  stream.AddStackMapEntry(1, 128, 0xFF, &sp_mask2, 1, 0);
+  stream.AddDexRegisterEntry(DexRegisterMap::kInRegister, 0);
+
+  size_t size = stream.ComputeNeededSize();
+  void* memory = arena.Alloc(size, kArenaAllocMisc);
+  MemoryRegion region(memory, size);
+  stream.FillIn(region);
+
+  CodeInfo<size_t> code_info(region);
+  // The highest stack mask bit is 8, so encoding it needs two bytes.
+  ASSERT_EQ(2u, code_info.GetStackMaskSize());
+  ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
+
+  StackMap<size_t> stack_map = code_info.GetStackMapAt(0);
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePc(64)));
+  ASSERT_EQ(0u, stack_map.GetDexPc());
+  ASSERT_EQ(64u, stack_map.GetNativePc());
+  ASSERT_EQ(0x3u, stack_map.GetRegisterMask());
+
+  MemoryRegion stack_mask = stack_map.GetStackMask();
+  ASSERT_TRUE(SameBits(stack_mask, sp_mask1));
+
+  DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, 2);
+  ASSERT_EQ(DexRegisterMap::kInStack, dex_registers.GetLocationKind(0));
+  ASSERT_EQ(DexRegisterMap::kConstant, dex_registers.GetLocationKind(1));
+  ASSERT_EQ(0, dex_registers.GetValue(0));
+  ASSERT_EQ(-2, dex_registers.GetValue(1));
+
+  InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+  ASSERT_EQ(2u, inline_info.GetDepth());
+  ASSERT_EQ(42u, inline_info.GetMethodReferenceIndexAtDepth(0));
+  ASSERT_EQ(82u, inline_info.GetMethodReferenceIndexAtDepth(1));
+
+  stack_map = code_info.GetStackMapAt(1);
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePc(128u)));
+  ASSERT_EQ(1u, stack_map.GetDexPc());
+  ASSERT_EQ(128u, stack_map.GetNativePc());
+  ASSERT_EQ(0xFFu, stack_map.GetRegisterMask());
+
+  stack_mask = stack_map.GetStackMask();
+  ASSERT_TRUE(SameBits(stack_mask, sp_mask2));
+
+  ASSERT_FALSE(stack_map.HasInlineInfo());
+}
+
+}  // namespace art
diff --git a/runtime/memory_region.h b/runtime/memory_region.h
index 849ab1c..bab2e86 100644
--- a/runtime/memory_region.h
+++ b/runtime/memory_region.h
@@ -56,14 +56,31 @@
     return ComputeInternalPointer<T>(offset);
   }
 
+  // Load a single bit in the region. The bit at offset 0 is the least
+  // significant bit in the first byte.
+  bool LoadBit(uintptr_t bit_offset) const {
+    uint8_t bit_mask;
+    uint8_t byte = *ComputeBitPointer(bit_offset, &bit_mask);
+    return byte & bit_mask;
+  }
+
+  // Store a single bit in the region. The bit at offset 0 is the least
+  // significant bit in the first byte.
+  void StoreBit(uintptr_t bit_offset, bool value) const {
+    uint8_t bit_mask;
+    uint8_t* byte = ComputeBitPointer(bit_offset, &bit_mask);
+    if (value) {
+      *byte |= bit_mask;
+    } else {
+      *byte &= ~bit_mask;
+    }
+  }
+
   void CopyFrom(size_t offset, const MemoryRegion& from) const;
 
   // Compute a sub memory region based on an existing one.
-  void Subregion(const MemoryRegion& from, uintptr_t offset, uintptr_t size) {
-    CHECK_GE(from.size(), size);
-    CHECK_LE(offset,  from.size() - size);
-    pointer_ = reinterpret_cast<void*>(from.start() + offset);
-    size_ = size;
+  MemoryRegion Subregion(uintptr_t offset, uintptr_t size) const {
+    CHECK_GE(this->size(), size);
+    CHECK_LE(offset,  this->size() - size);
+    return MemoryRegion(reinterpret_cast<void*>(start() + offset), size);
   }
 
   // Compute an extended memory region based on an existing one.
@@ -90,8 +107,6 @@
 
   void* pointer_;
   size_t size_;
-
-  DISALLOW_COPY_AND_ASSIGN(MemoryRegion);
 };
 
 }  // namespace art
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
new file mode 100644
index 0000000..7d3a48f
--- /dev/null
+++ b/runtime/stack_map.h
@@ -0,0 +1,307 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_STACK_MAP_H_
+#define ART_RUNTIME_STACK_MAP_H_
+
+#include "base/bit_vector.h"
+#include "memory_region.h"
+
+namespace art {
+
+/**
+ * Classes in the following file are wrapper on stack map information backed
+ * by a MemoryRegion. As such they read and write to the region, they don't have
+ * their own fields.
+ */
+
+/**
+ * Inline information for a specific PC. The information is of the form:
+ * [inlining_depth, [method_dex reference]+]
+ */
+class InlineInfo {
+ public:
+  explicit InlineInfo(MemoryRegion region) : region_(region) {}
+
+  uint8_t GetDepth() const {
+    return region_.Load<uint8_t>(kDepthOffset);
+  }
+
+  void SetDepth(uint8_t depth) {
+    region_.Store<uint8_t>(kDepthOffset, depth);
+  }
+
+  uint32_t GetMethodReferenceIndexAtDepth(uint8_t depth) const {
+    return region_.Load<uint32_t>(kFixedSize + depth * SingleEntrySize());
+  }
+
+  void SetMethodReferenceIndexAtDepth(uint8_t depth, uint32_t index) {
+    region_.Store<uint32_t>(kFixedSize + depth * SingleEntrySize(), index);
+  }
+
+  // Size in bytes of one inline entry (a method reference index).
+  static size_t SingleEntrySize() {
+    return sizeof(uint32_t);
+  }
+
+ private:
+  static constexpr int kDepthOffset = 0;
+  static constexpr int kFixedSize = kDepthOffset + sizeof(uint8_t);
+
+  // Sentinel stored in a StackMap's inline descriptor offset to mean
+  // "this stack map has no inline info".
+  static constexpr uint32_t kNoInlineInfo = -1;
+
+  MemoryRegion region_;
+
+  template<typename T> friend class CodeInfo;
+  template<typename T> friend class StackMap;
+  template<typename T> friend class StackMapStream;
+};
+
+/**
+ * Information on dex register values for a specific PC. The information is
+ * of the form:
+ * [location_kind, register_value]+.
+ *
+ * The location_kind for a Dex register can either be:
+ * - Constant: register_value holds the constant,
+ * - Stack: register_value holds the stack offset,
+ * - Register: register_value holds the register number.
+ */
+class DexRegisterMap {
+ public:
+  explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
+
+  enum LocationKind {
+    kInStack,     // value holds the stack offset.
+    kInRegister,  // value holds the register number.
+    kConstant     // value holds the constant.
+  };
+
+  LocationKind GetLocationKind(uint16_t register_index) const {
+    return region_.Load<LocationKind>(
+        kFixedSize + register_index * SingleEntrySize());
+  }
+
+  // Store both the kind and the value of the given dex register.
+  void SetRegisterInfo(uint16_t register_index, LocationKind kind, int32_t value) {
+    size_t entry = kFixedSize + register_index * SingleEntrySize();
+    region_.Store<LocationKind>(entry, kind);
+    region_.Store<int32_t>(entry + sizeof(LocationKind), value);
+  }
+
+  int32_t GetValue(uint16_t register_index) const {
+    return region_.Load<int32_t>(
+        kFixedSize + sizeof(LocationKind) + register_index * SingleEntrySize());
+  }
+
+  // Size in bytes of one (kind, value) entry.
+  static size_t SingleEntrySize() {
+    return sizeof(LocationKind) + sizeof(int32_t);
+  }
+
+ private:
+  static constexpr int kFixedSize = 0;
+
+  MemoryRegion region_;
+
+  template <typename T> friend class CodeInfo;
+  template <typename T> friend class StackMapStream;
+};
+
+/**
+ * A Stack Map holds compilation information for a specific PC necessary for:
+ * - Mapping it to a dex PC,
+ * - Knowing which stack entries are objects,
+ * - Knowing which registers hold objects,
+ * - Knowing the inlining information,
+ * - Knowing the values of dex registers.
+ *
+ * The information is of the form:
+ * [dex_pc, native_pc, dex_register_map_offset, inlining_info_offset, register_mask, stack_mask].
+ *
+ * Note that register_mask is fixed size, but stack_mask is variable size, depending on the
+ * stack size of a method.
+ */
+template <typename T>
+class StackMap {
+ public:
+  explicit StackMap(MemoryRegion region) : region_(region) {}
+
+  uint32_t GetDexPc() const {
+    return region_.Load<uint32_t>(kDexPcOffset);
+  }
+
+  void SetDexPc(uint32_t dex_pc) {
+    region_.Store<uint32_t>(kDexPcOffset, dex_pc);
+  }
+
+  T GetNativePc() const {
+    return region_.Load<T>(kNativePcOffset);
+  }
+
+  void SetNativePc(T native_pc) {
+    region_.Store<T>(kNativePcOffset, native_pc);
+  }
+
+  uint32_t GetDexRegisterMapOffset() const {
+    return region_.Load<uint32_t>(kDexRegisterMapOffsetOffset);
+  }
+
+  void SetDexRegisterMapOffset(uint32_t offset) {
+    region_.Store<uint32_t>(kDexRegisterMapOffsetOffset, offset);
+  }
+
+  uint32_t GetInlineDescriptorOffset() const {
+    return region_.Load<uint32_t>(kInlineDescriptorOffsetOffset);
+  }
+
+  void SetInlineDescriptorOffset(uint32_t offset) {
+    region_.Store<uint32_t>(kInlineDescriptorOffsetOffset, offset);
+  }
+
+  uint32_t GetRegisterMask() const {
+    return region_.Load<uint32_t>(kRegisterMaskOffset);
+  }
+
+  void SetRegisterMask(uint32_t mask) {
+    region_.Store<uint32_t>(kRegisterMaskOffset, mask);
+  }
+
+  // The stack mask is the variable-sized tail of this stack map.
+  MemoryRegion GetStackMask() const {
+    return region_.Subregion(kStackMaskOffset, StackMaskSize());
+  }
+
+  // Copies the first StackMaskSize() * kBitsPerByte bits of sp_map into
+  // the encoded mask.
+  void SetStackMask(const BitVector& sp_map) {
+    MemoryRegion region = GetStackMask();
+    for (size_t i = 0; i < region.size_in_bits(); i++) {
+      region.StoreBit(i, sp_map.IsBitSet(i));
+    }
+  }
+
+  bool HasInlineInfo() const {
+    return GetInlineDescriptorOffset() != InlineInfo::kNoInlineInfo;
+  }
+
+  // Two stack maps are equal when they are backed by the same region.
+  bool Equals(const StackMap& other) const {
+    return region_.pointer() == other.region_.pointer()
+       && region_.size() == other.region_.size();
+  }
+
+ private:
+  static constexpr int kDexPcOffset = 0;
+  static constexpr int kNativePcOffset = kDexPcOffset + sizeof(uint32_t);
+  static constexpr int kDexRegisterMapOffsetOffset = kNativePcOffset + sizeof(T);
+  static constexpr int kInlineDescriptorOffsetOffset =
+      kDexRegisterMapOffsetOffset + sizeof(uint32_t);
+  static constexpr int kRegisterMaskOffset = kInlineDescriptorOffsetOffset + sizeof(uint32_t);
+  static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
+  static constexpr int kStackMaskOffset = kFixedSize;
+
+  size_t StackMaskSize() const { return region_.size() - kFixedSize; }
+
+  MemoryRegion region_;
+
+  template <typename U> friend class CodeInfo;
+  template <typename U> friend class StackMapStream;
+};
+
+
+/**
+ * Wrapper around all compiler information collected for a method.
+ * The information is of the form:
+ * [number_of_stack_maps, stack_mask_size, StackMap+, DexRegisterInfo+, InlineInfo*].
+ */
+template <typename T>
+class CodeInfo {
+ public:
+  explicit CodeInfo(MemoryRegion region) : region_(region) {}
+
+  StackMap<T> GetStackMapAt(size_t i) const {
+    size_t size = StackMapSize();
+    return StackMap<T>(GetStackMaps().Subregion(i * size, size));
+  }
+
+  uint32_t GetStackMaskSize() const {
+    return region_.Load<uint32_t>(kStackMaskSizeOffset);
+  }
+
+  void SetStackMaskSize(uint32_t size) {
+    region_.Store<uint32_t>(kStackMaskSizeOffset, size);
+  }
+
+  size_t GetNumberOfStackMaps() const {
+    return region_.Load<uint32_t>(kNumberOfStackMapsOffset);
+  }
+
+  void SetNumberOfStackMaps(uint32_t number_of_stack_maps) {
+    region_.Store<uint32_t>(kNumberOfStackMapsOffset, number_of_stack_maps);
+  }
+
+  // Size of one stack map: fixed part plus the variable-sized stack mask.
+  size_t StackMapSize() const {
+    return StackMap<T>::kFixedSize + GetStackMaskSize();
+  }
+
+  // The register count is not encoded; the caller must pass the count used
+  // when the map was built.
+  DexRegisterMap GetDexRegisterMapOf(StackMap<T> stack_map, uint32_t number_of_dex_registers) {
+    uint32_t offset = stack_map.GetDexRegisterMapOffset();
+    return DexRegisterMap(region_.Subregion(offset,
+        DexRegisterMap::kFixedSize + number_of_dex_registers * DexRegisterMap::SingleEntrySize()));
+  }
+
+  // Precondition (not checked here): stack_map.HasInlineInfo().
+  InlineInfo GetInlineInfoOf(StackMap<T> stack_map) {
+    uint32_t offset = stack_map.GetInlineDescriptorOffset();
+    uint8_t depth = region_.Load<uint8_t>(offset);
+    return InlineInfo(region_.Subregion(offset,
+        InlineInfo::kFixedSize + depth * InlineInfo::SingleEntrySize()));
+  }
+
+  // Linear search; aborts if no stack map has the given dex pc.
+  StackMap<T> GetStackMapForDexPc(uint32_t dex_pc) {
+    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
+      StackMap<T> stack_map = GetStackMapAt(i);
+      if (stack_map.GetDexPc() == dex_pc) {
+        return stack_map;
+      }
+    }
+    LOG(FATAL) << "Unreachable";
+    return StackMap<T>(MemoryRegion());
+  }
+
+  // Linear search; aborts if no stack map has the given native pc.
+  StackMap<T> GetStackMapForNativePc(T native_pc) {
+    // TODO: stack maps are sorted by native pc, we can do a binary search.
+    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
+      StackMap<T> stack_map = GetStackMapAt(i);
+      if (stack_map.GetNativePc() == native_pc) {
+        return stack_map;
+      }
+    }
+    LOG(FATAL) << "Unreachable";
+    return StackMap<T>(MemoryRegion());
+  }
+
+ private:
+  static constexpr int kNumberOfStackMapsOffset = 0;
+  static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
+  static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);
+
+  // Region covering all stack maps; empty when the whole CodeInfo is empty.
+  MemoryRegion GetStackMaps() const {
+    return region_.size() == 0
+        ? MemoryRegion()
+        : region_.Subregion(kFixedSize, StackMapSize() * GetNumberOfStackMaps());
+  }
+
+  MemoryRegion region_;
+  template<typename U> friend class StackMapStream;
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_STACK_MAP_H_