Compress the StackMaps.

First step towards compressing the StackMap (not the
DexRegisterMap). The next step will be to use only the space
actually needed for each field (instead of byte -> word).

Change-Id: I4f81b2d05bf5cc508585e16fbbed1bafbc850e2e
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index e1a5afe..9914ef4 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -38,6 +38,8 @@
         dex_register_locations_(allocator, 10 * 4),
         inline_infos_(allocator, 2),
         stack_mask_max_(-1),
+        dex_pc_max_(0),
+        native_pc_offset_max_(0),
         number_of_stack_maps_with_inline_info_(0) {}
 
   // Compute bytes needed to encode a mask with the given maximum element.
@@ -92,6 +94,9 @@
     if (inlining_depth > 0) {
       number_of_stack_maps_with_inline_info_++;
     }
+
+    dex_pc_max_ = std::max(dex_pc_max_, dex_pc);
+    native_pc_offset_max_ = std::max(native_pc_offset_max_, native_pc_offset);
   }
 
   void AddInlineInfoEntry(uint32_t method_index) {
@@ -114,7 +119,12 @@
   }
 
   size_t ComputeStackMapsSize() const {
-    return stack_maps_.Size() * StackMap::ComputeStackMapSize(ComputeStackMaskSize());
+    return stack_maps_.Size() * StackMap::ComputeStackMapSize(
+        ComputeStackMaskSize(),
+        ComputeInlineInfoSize(),
+        ComputeDexRegisterMapsSize(),
+        dex_pc_max_,
+        native_pc_offset_max_);
   }
 
   // Compute the size of the Dex register map of `entry`.
@@ -165,16 +175,20 @@
     code_info.SetOverallSize(region.size());
 
     size_t stack_mask_size = ComputeStackMaskSize();
-    uint8_t* memory_start = region.start();
+
+    size_t dex_register_map_size = ComputeDexRegisterMapsSize();
+    size_t inline_info_size = ComputeInlineInfoSize();
 
     MemoryRegion dex_register_locations_region = region.Subregion(
       ComputeDexRegisterMapsStart(),
-      ComputeDexRegisterMapsSize());
+      dex_register_map_size);
 
     MemoryRegion inline_infos_region = region.Subregion(
       ComputeInlineInfoStart(),
-      ComputeInlineInfoSize());
+      inline_info_size);
 
+    code_info.SetEncoding(
+        inline_info_size, dex_register_map_size, dex_pc_max_, native_pc_offset_max_);
     code_info.SetNumberOfStackMaps(stack_maps_.Size());
     code_info.SetStackMaskSize(stack_mask_size);
     DCHECK_EQ(code_info.StackMapsSize(), ComputeStackMapsSize());
@@ -185,11 +199,11 @@
       StackMap stack_map = code_info.GetStackMapAt(i);
       StackMapEntry entry = stack_maps_.Get(i);
 
-      stack_map.SetDexPc(entry.dex_pc);
-      stack_map.SetNativePcOffset(entry.native_pc_offset);
-      stack_map.SetRegisterMask(entry.register_mask);
+      stack_map.SetDexPc(code_info, entry.dex_pc);
+      stack_map.SetNativePcOffset(code_info, entry.native_pc_offset);
+      stack_map.SetRegisterMask(code_info, entry.register_mask);
       if (entry.sp_mask != nullptr) {
-        stack_map.SetStackMask(*entry.sp_mask);
+        stack_map.SetStackMask(code_info, *entry.sp_mask);
       }
 
       if (entry.num_dex_registers != 0) {
@@ -200,7 +214,8 @@
                 ComputeDexRegisterMapSize(entry));
         next_dex_register_map_offset += register_region.size();
         DexRegisterMap dex_register_map(register_region);
-        stack_map.SetDexRegisterMapOffset(register_region.start() - memory_start);
+        stack_map.SetDexRegisterMapOffset(
+            code_info, register_region.start() - dex_register_locations_region.start());
 
         // Offset in `dex_register_map` where to store the next register entry.
         size_t offset = DexRegisterMap::kFixedSize;
@@ -222,7 +237,7 @@
         // Ensure we reached the end of the Dex registers region.
         DCHECK_EQ(offset, register_region.size());
       } else {
-        stack_map.SetDexRegisterMapOffset(StackMap::kNoDexRegisterMap);
+        stack_map.SetDexRegisterMapOffset(code_info, StackMap::kNoDexRegisterMap);
       }
 
       // Set the inlining info.
@@ -233,7 +248,9 @@
         next_inline_info_offset += inline_region.size();
         InlineInfo inline_info(inline_region);
 
-        stack_map.SetInlineDescriptorOffset(inline_region.start() - memory_start);
+        // Currently relative to the dex register map.
+        stack_map.SetInlineDescriptorOffset(
+            code_info, inline_region.start() - dex_register_locations_region.start());
 
         inline_info.SetDepth(entry.inlining_depth);
         for (size_t j = 0; j < entry.inlining_depth; ++j) {
@@ -241,7 +258,9 @@
           inline_info.SetMethodReferenceIndexAtDepth(j, inline_entry.method_index);
         }
       } else {
-        stack_map.SetInlineDescriptorOffset(StackMap::kNoInlineInfo);
+        if (inline_info_size != 0) {
+          stack_map.SetInlineDescriptorOffset(code_info, StackMap::kNoInlineInfo);
+        }
       }
     }
   }
@@ -262,6 +281,8 @@
   GrowableArray<DexRegisterLocation> dex_register_locations_;
   GrowableArray<InlineInfoEntry> inline_infos_;
   int stack_mask_max_;
+  uint32_t dex_pc_max_;
+  uint32_t native_pc_offset_max_;
   size_t number_of_stack_maps_with_inline_info_;
 
   ART_FRIEND_TEST(StackMapTest, Test1);
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 87ac2e7..e7075c0 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -54,14 +54,14 @@
   StackMap stack_map = code_info.GetStackMapAt(0);
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
-  ASSERT_EQ(0u, stack_map.GetDexPc());
-  ASSERT_EQ(64u, stack_map.GetNativePcOffset());
-  ASSERT_EQ(0x3u, stack_map.GetRegisterMask());
+  ASSERT_EQ(0u, stack_map.GetDexPc(code_info));
+  ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info));
+  ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info));
 
-  MemoryRegion stack_mask = stack_map.GetStackMask();
+  MemoryRegion stack_mask = stack_map.GetStackMask(code_info);
   ASSERT_TRUE(SameBits(stack_mask, sp_mask));
 
-  ASSERT_TRUE(stack_map.HasDexRegisterMap());
+  ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
   DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
   ASSERT_EQ(7u, dex_registers.Size());
   DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
@@ -73,7 +73,7 @@
   ASSERT_EQ(0, location0.GetValue());
   ASSERT_EQ(-2, location1.GetValue());
 
-  ASSERT_FALSE(stack_map.HasInlineInfo());
+  ASSERT_FALSE(stack_map.HasInlineInfo(code_info));
 }
 
 TEST(StackMapTest, Test2) {
@@ -112,14 +112,14 @@
     StackMap stack_map = code_info.GetStackMapAt(0);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
-    ASSERT_EQ(0u, stack_map.GetDexPc());
-    ASSERT_EQ(64u, stack_map.GetNativePcOffset());
-    ASSERT_EQ(0x3u, stack_map.GetRegisterMask());
+    ASSERT_EQ(0u, stack_map.GetDexPc(code_info));
+    ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info));
+    ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info));
 
-    MemoryRegion stack_mask = stack_map.GetStackMask();
+    MemoryRegion stack_mask = stack_map.GetStackMask(code_info);
     ASSERT_TRUE(SameBits(stack_mask, sp_mask1));
 
-    ASSERT_TRUE(stack_map.HasDexRegisterMap());
+    ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
     DexRegisterMap dex_registers =
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
     ASSERT_EQ(7u, dex_registers.Size());
@@ -134,7 +134,7 @@
     ASSERT_EQ(0, location0.GetValue());
     ASSERT_EQ(-2, location1.GetValue());
 
-    ASSERT_TRUE(stack_map.HasInlineInfo());
+    ASSERT_TRUE(stack_map.HasInlineInfo(code_info));
     InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
     ASSERT_EQ(2u, inline_info.GetDepth());
     ASSERT_EQ(42u, inline_info.GetMethodReferenceIndexAtDepth(0));
@@ -146,14 +146,14 @@
     StackMap stack_map = code_info.GetStackMapAt(1);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u)));
-    ASSERT_EQ(1u, stack_map.GetDexPc());
-    ASSERT_EQ(128u, stack_map.GetNativePcOffset());
-    ASSERT_EQ(0xFFu, stack_map.GetRegisterMask());
+    ASSERT_EQ(1u, stack_map.GetDexPc(code_info));
+    ASSERT_EQ(128u, stack_map.GetNativePcOffset(code_info));
+    ASSERT_EQ(0xFFu, stack_map.GetRegisterMask(code_info));
 
-    MemoryRegion stack_mask = stack_map.GetStackMask();
+    MemoryRegion stack_mask = stack_map.GetStackMask(code_info);
     ASSERT_TRUE(SameBits(stack_mask, sp_mask2));
 
-    ASSERT_TRUE(stack_map.HasDexRegisterMap());
+    ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
     DexRegisterMap dex_registers =
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
     ASSERT_EQ(3u, dex_registers.Size());
@@ -168,7 +168,7 @@
     ASSERT_EQ(18, location0.GetValue());
     ASSERT_EQ(3, location1.GetValue());
 
-    ASSERT_FALSE(stack_map.HasInlineInfo());
+    ASSERT_FALSE(stack_map.HasInlineInfo(code_info));
   }
 }
 
@@ -190,14 +190,45 @@
 
   CodeInfo code_info(region);
   StackMap stack_map = code_info.GetStackMapAt(0);
-  ASSERT_TRUE(stack_map.HasDexRegisterMap());
+  ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
   DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, 2);
   ASSERT_EQ(DexRegisterLocation::Kind::kNone,
             dex_registers.GetLocationKind(0, number_of_dex_registers));
   ASSERT_EQ(DexRegisterLocation::Kind::kConstant,
             dex_registers.GetLocationKind(1, number_of_dex_registers));
   ASSERT_EQ(-2, dex_registers.GetConstant(1, number_of_dex_registers));
-  ASSERT_FALSE(stack_map.HasInlineInfo());
+  ASSERT_FALSE(stack_map.HasInlineInfo(code_info));
+}
+
+// Generate a stack map whose dex register offset is
+// StackMap::kNoDexRegisterMapSmallEncoding, and ensure we do
+// not treat it as kNoDexRegisterMap.
+TEST(StackMapTest, DexRegisterMapOffsetOverflow) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  StackMapStream stream(&arena);
+
+  ArenaBitVector sp_mask(&arena, 0, false);
+  uint32_t number_of_dex_registers = 0xEA;
+  stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  for (uint32_t i = 0; i < number_of_dex_registers - 9; ++i) {
+    stream.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, 0);
+  }
+  stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  for (uint32_t i = 0; i < number_of_dex_registers; ++i) {
+    stream.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, 0);
+  }
+
+  size_t size = stream.ComputeNeededSize();
+  void* memory = arena.Alloc(size, kArenaAllocMisc);
+  MemoryRegion region(memory, size);
+  stream.FillIn(region);
+
+  CodeInfo code_info(region);
+  StackMap stack_map = code_info.GetStackMapAt(1);
+  ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
+  ASSERT_NE(stack_map.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMap);
+  ASSERT_EQ(stack_map.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMapSmallEncoding);
 }
 
 }  // namespace art
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index c27b3d4..14bcd4b 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1039,62 +1039,11 @@
     }
   }
 
-  void DumpRegisterMapping(std::ostream& os,
-                           size_t dex_register_num,
-                           DexRegisterLocation::Kind kind,
-                           int32_t value,
-                           const std::string& prefix = "v",
-                           const std::string& suffix = "") {
-    os << "      " << prefix << dex_register_num << ": "
-       << DexRegisterLocation::PrettyDescriptor(kind)
-       << " (" << value << ")" << suffix << '\n';
-  }
-
-  void DumpStackMapHeader(std::ostream& os, const CodeInfo& code_info, size_t stack_map_num) {
-    StackMap stack_map = code_info.GetStackMapAt(stack_map_num);
-    os << "    StackMap " << stack_map_num
-       << std::hex
-       << " (dex_pc=0x" << stack_map.GetDexPc()
-       << ", native_pc_offset=0x" << stack_map.GetNativePcOffset()
-       << ", register_mask=0x" << stack_map.GetRegisterMask()
-       << std::dec
-       << ", stack_mask=0b";
-    MemoryRegion stack_mask = stack_map.GetStackMask();
-    for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
-      os << stack_mask.LoadBit(e - i - 1);
-    }
-    os << ")\n";
-  };
-
   // Display a CodeInfo object emitted by the optimizing compiler.
   void DumpCodeInfo(std::ostream& os,
                     const CodeInfo& code_info,
                     const DexFile::CodeItem& code_item) {
-    uint16_t number_of_dex_registers = code_item.registers_size_;
-    uint32_t code_info_size = code_info.GetOverallSize();
-    size_t number_of_stack_maps = code_info.GetNumberOfStackMaps();
-    os << "  Optimized CodeInfo (size=" << code_info_size
-       << ", number_of_dex_registers=" << number_of_dex_registers
-       << ", number_of_stack_maps=" << number_of_stack_maps << ")\n";
-
-    // Display stack maps along with Dex register maps.
-    for (size_t i = 0; i < number_of_stack_maps; ++i) {
-      StackMap stack_map = code_info.GetStackMapAt(i);
-      DumpStackMapHeader(os, code_info, i);
-      if (stack_map.HasDexRegisterMap()) {
-        DexRegisterMap dex_register_map =
-            code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
-        // TODO: Display the bit mask of live Dex registers.
-        for (size_t j = 0; j < number_of_dex_registers; ++j) {
-          if (dex_register_map.IsDexRegisterLive(j)) {
-            DexRegisterLocation location =
-                dex_register_map.GetLocationKindAndValue(j, number_of_dex_registers);
-            DumpRegisterMapping(os, j, location.GetInternalKind(), location.GetValue());
-          }
-        }
-      }
-    }
-    // TODO: Dump the stack map's inline information.
+    code_info.Dump(os, code_item.registers_size_);
   }
 
   // Display a vmap table.
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 6588288..5548d94 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -151,6 +151,7 @@
   runtime_options.cc \
   signal_catcher.cc \
   stack.cc \
+  stack_map.cc \
   thread.cc \
   thread_list.cc \
   thread_pool.cc \
diff --git a/runtime/check_reference_map_visitor.h b/runtime/check_reference_map_visitor.h
index 0ec0295..204546d 100644
--- a/runtime/check_reference_map_visitor.h
+++ b/runtime/check_reference_map_visitor.h
@@ -69,8 +69,8 @@
     uint16_t number_of_dex_registers = m->GetCodeItem()->registers_size_;
     DexRegisterMap dex_register_map =
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
-    MemoryRegion stack_mask = stack_map.GetStackMask();
-    uint32_t register_mask = stack_map.GetRegisterMask();
+    MemoryRegion stack_mask = stack_map.GetStackMask(code_info);
+    uint32_t register_mask = stack_map.GetRegisterMask(code_info);
     for (int i = 0; i < number_of_references; ++i) {
       int reg = registers[i];
       CHECK(reg < m->GetCodeItem()->registers_size_);
diff --git a/runtime/mirror/art_method-inl.h b/runtime/mirror/art_method-inl.h
index c27c6e9..0ccf5db 100644
--- a/runtime/mirror/art_method-inl.h
+++ b/runtime/mirror/art_method-inl.h
@@ -230,10 +230,6 @@
   return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
 }
 
-inline StackMap ArtMethod::GetStackMap(uint32_t native_pc_offset) {
-  return GetOptimizedCodeInfo().GetStackMapForNativePcOffset(native_pc_offset);
-}
-
 inline CodeInfo ArtMethod::GetOptimizedCodeInfo() {
   DCHECK(IsOptimized(sizeof(void*)));
   const void* code_pointer = GetQuickOatCodePointer(sizeof(void*));
diff --git a/runtime/mirror/art_method.cc b/runtime/mirror/art_method.cc
index bc58709..ffee59e 100644
--- a/runtime/mirror/art_method.cc
+++ b/runtime/mirror/art_method.cc
@@ -202,8 +202,8 @@
   const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
   uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
   if (IsOptimized(sizeof(void*))) {
-    uint32_t ret = GetStackMap(sought_offset).GetDexPc();
-    return ret;
+    CodeInfo code_info = GetOptimizedCodeInfo();
+    return code_info.GetStackMapForNativePcOffset(sought_offset).GetDexPc(code_info);
   }
 
   MappingTable table(entry_point != nullptr ?
diff --git a/runtime/mirror/art_method.h b/runtime/mirror/art_method.h
index d878f25..0eb1b91 100644
--- a/runtime/mirror/art_method.h
+++ b/runtime/mirror/art_method.h
@@ -348,7 +348,6 @@
   const uint8_t* GetVmapTable(const void* code_pointer, size_t pointer_size)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  StackMap GetStackMap(uint32_t native_pc_offset) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   CodeInfo GetOptimizedCodeInfo() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // Callers should wrap the uint8_t* in a GcMap instance for convenient access.
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
new file mode 100644
index 0000000..020a6e6
--- /dev/null
+++ b/runtime/stack_map.cc
@@ -0,0 +1,198 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "stack_map.h"
+
+namespace art {
+
+constexpr uint32_t StackMap::kNoDexRegisterMapSmallEncoding;
+constexpr uint32_t StackMap::kNoInlineInfoSmallEncoding;
+constexpr uint32_t StackMap::kNoDexRegisterMap;
+constexpr uint32_t StackMap::kNoInlineInfo;
+
+uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
+  return info.HasSmallDexPc()
+      ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset())
+      : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset());
+}
+
+void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
+  DCHECK(!info.HasSmallDexPc() || IsUint<kBitsForSmallEncoding>(dex_pc)) << dex_pc;
+  info.HasSmallDexPc()
+      ? region_.StoreUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc)
+      : region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc);
+}
+
+uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
+  return info.HasSmallNativePc()
+      ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapNativePcOffset())
+      : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapNativePcOffset());
+}
+
+void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
+  DCHECK(!info.HasSmallNativePc()
+         || IsUint<kBitsForSmallEncoding>(native_pc_offset)) << native_pc_offset;
+  uint32_t entry = info.ComputeStackMapNativePcOffset();
+  info.HasSmallNativePc()
+      ? region_.StoreUnaligned<kSmallEncoding>(entry, native_pc_offset)
+      : region_.StoreUnaligned<kLargeEncoding>(entry, native_pc_offset);
+}
+
+uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
+  if (info.HasSmallDexRegisterMap()) {
+    uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
+        info.ComputeStackMapDexRegisterMapOffset());
+    if (value == kNoDexRegisterMapSmallEncoding) {
+      return kNoDexRegisterMap;
+    } else {
+      return value;
+    }
+  } else {
+    return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexRegisterMapOffset());
+  }
+}
+
+void StackMap::SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset) {
+  DCHECK(!info.HasSmallDexRegisterMap()
+         || (IsUint<kBitsForSmallEncoding>(offset)
+             || (offset == kNoDexRegisterMap))) << offset;
+  size_t dex_register_map_entry = info.ComputeStackMapDexRegisterMapOffset();
+  info.HasSmallDexRegisterMap()
+      ? region_.StoreUnaligned<kSmallEncoding>(dex_register_map_entry, offset)
+      : region_.StoreUnaligned<kLargeEncoding>(dex_register_map_entry, offset);
+}
+
+uint32_t StackMap::GetInlineDescriptorOffset(const CodeInfo& info) const {
+  if (!info.HasInlineInfo()) return kNoInlineInfo;
+  if (info.HasSmallInlineInfo()) {
+    uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
+        info.ComputeStackMapInlineInfoOffset());
+    if (value == kNoInlineInfoSmallEncoding) {
+      return kNoInlineInfo;
+    } else {
+      return value;
+    }
+  } else {
+    return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapInlineInfoOffset());
+  }
+}
+
+void StackMap::SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset) {
+  DCHECK(info.HasInlineInfo());
+  DCHECK(!info.HasSmallInlineInfo()
+         || (IsUint<kBitsForSmallEncoding>(offset)
+             || (offset == kNoInlineInfo))) << offset;
+  size_t inline_entry = info.ComputeStackMapInlineInfoOffset();
+  info.HasSmallInlineInfo()
+      ? region_.StoreUnaligned<kSmallEncoding>(inline_entry, offset)
+      : region_.StoreUnaligned<kLargeEncoding>(inline_entry, offset);
+}
+
+uint32_t StackMap::GetRegisterMask(const CodeInfo& info) const {
+  return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset());
+}
+
+void StackMap::SetRegisterMask(const CodeInfo& info, uint32_t mask) {
+  region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset(), mask);
+}
+
+size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
+                                     bool has_inline_info,
+                                     bool is_small_inline_info,
+                                     bool is_small_dex_map,
+                                     bool is_small_dex_pc,
+                                     bool is_small_native_pc) {
+  return StackMap::kFixedSize
+      + stack_mask_size
+      + (has_inline_info ? NumberOfBytesForEntry(is_small_inline_info) : 0)
+      + NumberOfBytesForEntry(is_small_dex_map)
+      + NumberOfBytesForEntry(is_small_dex_pc)
+      + NumberOfBytesForEntry(is_small_native_pc);
+}
+
+size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
+                                     size_t inline_info_size,
+                                     size_t dex_register_map_size,
+                                     size_t dex_pc_max,
+                                     size_t native_pc_max) {
+  return ComputeStackMapSize(
+      stack_mask_size,
+      inline_info_size != 0,
+      // + 1 to also encode kNoInlineInfo.
+      IsUint<kBitsForSmallEncoding>(inline_info_size + dex_register_map_size + 1),
+      // + 1 to also encode kNoDexRegisterMap.
+      IsUint<kBitsForSmallEncoding>(dex_register_map_size + 1),
+      IsUint<kBitsForSmallEncoding>(dex_pc_max),
+      IsUint<kBitsForSmallEncoding>(native_pc_max));
+}
+
+MemoryRegion StackMap::GetStackMask(const CodeInfo& info) const {
+  return region_.Subregion(info.ComputeStackMapStackMaskOffset(), info.GetStackMaskSize());
+}
+
+void CodeInfo::DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const {
+  StackMap stack_map = GetStackMapAt(stack_map_num);
+  os << "    StackMap " << stack_map_num
+     << std::hex
+     << " (dex_pc=0x" << stack_map.GetDexPc(*this)
+     << ", native_pc_offset=0x" << stack_map.GetNativePcOffset(*this)
+     << ", dex_register_map_offset=0x" << stack_map.GetDexRegisterMapOffset(*this)
+     << ", inline_info_offset=0x" << stack_map.GetInlineDescriptorOffset(*this)
+     << ", register_mask=0x" << stack_map.GetRegisterMask(*this)
+     << std::dec
+     << ", stack_mask=0b";
+  MemoryRegion stack_mask = stack_map.GetStackMask(*this);
+  for (size_t i = 0, e = stack_mask.size_in_bits(); i < e; ++i) {
+    os << stack_mask.LoadBit(e - i - 1);
+  }
+  os << ")\n";
+};
+
+void CodeInfo::Dump(std::ostream& os, uint16_t number_of_dex_registers) const {
+  uint32_t code_info_size = GetOverallSize();
+  size_t number_of_stack_maps = GetNumberOfStackMaps();
+  os << "  Optimized CodeInfo (size=" << code_info_size
+     << ", number_of_dex_registers=" << number_of_dex_registers
+     << ", number_of_stack_maps=" << number_of_stack_maps
+     << ", has_inline_info=" << HasInlineInfo()
+     << ", has_small_inline_info=" << HasSmallInlineInfo()
+     << ", has_small_dex_register_map=" << HasSmallDexRegisterMap()
+     << ", has_small_dex_pc=" << HasSmallDexPc()
+     << ", has_small_native_pc=" << HasSmallNativePc()
+     << ")\n";
+
+  // Display stack maps along with Dex register maps.
+  for (size_t i = 0; i < number_of_stack_maps; ++i) {
+    StackMap stack_map = GetStackMapAt(i);
+    DumpStackMapHeader(os, i);
+    if (stack_map.HasDexRegisterMap(*this)) {
+      DexRegisterMap dex_register_map = GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+      // TODO: Display the bit mask of live Dex registers.
+      for (size_t j = 0; j < number_of_dex_registers; ++j) {
+        if (dex_register_map.IsDexRegisterLive(j)) {
+          DexRegisterLocation location =
+              dex_register_map.GetLocationKindAndValue(j, number_of_dex_registers);
+           os << "      " << "v" << j << ": "
+              << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind())
+              << " (" << location.GetValue() << ")" << '\n';
+        }
+      }
+    }
+  }
+  // TODO: Dump the stack map's inline information.
+}
+
+}  // namespace art
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 961772c..629fc9a 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -34,6 +34,8 @@
 // Size of Dex virtual registers.
 static size_t constexpr kVRegSize = 4;
 
+class CodeInfo;
+
 /**
  * Classes in the following file are wrapper on stack map information backed
  * by a MemoryRegion. As such they read and write to the region, they don't have
@@ -515,63 +517,41 @@
  public:
   explicit StackMap(MemoryRegion region) : region_(region) {}
 
-  uint32_t GetDexPc() const {
-    return region_.LoadUnaligned<uint32_t>(kDexPcOffset);
-  }
+  uint32_t GetDexPc(const CodeInfo& info) const;
 
-  void SetDexPc(uint32_t dex_pc) {
-    region_.StoreUnaligned<uint32_t>(kDexPcOffset, dex_pc);
-  }
+  void SetDexPc(const CodeInfo& info, uint32_t dex_pc);
 
-  uint32_t GetNativePcOffset() const {
-    return region_.LoadUnaligned<uint32_t>(kNativePcOffsetOffset);
-  }
+  uint32_t GetNativePcOffset(const CodeInfo& info) const;
 
-  void SetNativePcOffset(uint32_t native_pc_offset) {
-    region_.StoreUnaligned<uint32_t>(kNativePcOffsetOffset, native_pc_offset);
-  }
+  void SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset);
 
-  uint32_t GetDexRegisterMapOffset() const {
-    return region_.LoadUnaligned<uint32_t>(kDexRegisterMapOffsetOffset);
-  }
+  uint32_t GetDexRegisterMapOffset(const CodeInfo& info) const;
 
-  void SetDexRegisterMapOffset(uint32_t offset) {
-    region_.StoreUnaligned<uint32_t>(kDexRegisterMapOffsetOffset, offset);
-  }
+  void SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset);
 
-  uint32_t GetInlineDescriptorOffset() const {
-    return region_.LoadUnaligned<uint32_t>(kInlineDescriptorOffsetOffset);
-  }
+  uint32_t GetInlineDescriptorOffset(const CodeInfo& info) const;
 
-  void SetInlineDescriptorOffset(uint32_t offset) {
-    region_.StoreUnaligned<uint32_t>(kInlineDescriptorOffsetOffset, offset);
-  }
+  void SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset);
 
-  uint32_t GetRegisterMask() const {
-    return region_.LoadUnaligned<uint32_t>(kRegisterMaskOffset);
-  }
+  uint32_t GetRegisterMask(const CodeInfo& info) const;
 
-  void SetRegisterMask(uint32_t mask) {
-    region_.StoreUnaligned<uint32_t>(kRegisterMaskOffset, mask);
-  }
+  void SetRegisterMask(const CodeInfo& info, uint32_t mask);
 
-  MemoryRegion GetStackMask() const {
-    return region_.Subregion(kStackMaskOffset, StackMaskSize());
-  }
+  MemoryRegion GetStackMask(const CodeInfo& info) const;
 
-  void SetStackMask(const BitVector& sp_map) {
-    MemoryRegion region = GetStackMask();
+  void SetStackMask(const CodeInfo& info, const BitVector& sp_map) {
+    MemoryRegion region = GetStackMask(info);
     for (size_t i = 0; i < region.size_in_bits(); i++) {
       region.StoreBit(i, sp_map.IsBitSet(i));
     }
   }
 
-  bool HasDexRegisterMap() const {
-    return GetDexRegisterMapOffset() != kNoDexRegisterMap;
+  bool HasDexRegisterMap(const CodeInfo& info) const {
+    return GetDexRegisterMapOffset(info) != kNoDexRegisterMap;
   }
 
-  bool HasInlineInfo() const {
-    return GetInlineDescriptorOffset() != kNoInlineInfo;
+  bool HasInlineInfo(const CodeInfo& info) const {
+    return GetInlineDescriptorOffset(info) != kNoInlineInfo;
   }
 
   bool Equals(const StackMap& other) const {
@@ -579,32 +559,51 @@
        && region_.size() == other.region_.size();
   }
 
-  static size_t ComputeStackMapSize(size_t stack_mask_size) {
-    return StackMap::kFixedSize + stack_mask_size;
-  }
+  static size_t ComputeStackMapSize(size_t stack_mask_size,
+                                    bool has_inline_info,
+                                    bool is_small_inline_info,
+                                    bool is_small_dex_map,
+                                    bool is_small_dex_pc,
+                                    bool is_small_native_pc);
+
+  static size_t ComputeStackMapSize(size_t stack_mask_size,
+                                    size_t inline_info_size,
+                                    size_t dex_register_map_size,
+                                    size_t dex_pc_max,
+                                    size_t native_pc_max);
+
+  // TODO: Revisit this abstraction if we allow 3 bytes encoding.
+  typedef uint8_t kSmallEncoding;
+  typedef uint32_t kLargeEncoding;
+  static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
+  static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
+  static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
+  static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;
 
   // Special (invalid) offset for the DexRegisterMapOffset field meaning
   // that there is no Dex register map for this stack map.
   static constexpr uint32_t kNoDexRegisterMap = -1;
+  static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
+      std::numeric_limits<kSmallEncoding>::max();
 
   // Special (invalid) offset for the InlineDescriptorOffset field meaning
   // that there is no inline info for this stack map.
   static constexpr uint32_t kNoInlineInfo = -1;
+  static constexpr uint32_t kNoInlineInfoSmallEncoding =
+    std::numeric_limits<kSmallEncoding>::max();
+
+  // Returns the number of bytes needed for an entry in the StackMap.
+  static size_t NumberOfBytesForEntry(bool small_encoding) {
+    return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
+  }
 
  private:
   // TODO: Instead of plain types such as "uint32_t", introduce
   // typedefs (and document the memory layout of StackMap).
-  static constexpr int kDexPcOffset = 0;
-  static constexpr int kNativePcOffsetOffset = kDexPcOffset + sizeof(uint32_t);
-  static constexpr int kDexRegisterMapOffsetOffset = kNativePcOffsetOffset + sizeof(uint32_t);
-  static constexpr int kInlineDescriptorOffsetOffset =
-      kDexRegisterMapOffsetOffset + sizeof(uint32_t);
-  static constexpr int kRegisterMaskOffset = kInlineDescriptorOffsetOffset + sizeof(uint32_t);
+  static constexpr int kRegisterMaskOffset = 0;
   static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
   static constexpr int kStackMaskOffset = kFixedSize;
 
-  size_t StackMaskSize() const { return region_.size() - kFixedSize; }
-
   MemoryRegion region_;
 
   friend class CodeInfo;
@@ -626,6 +625,80 @@
     region_ = MemoryRegion(const_cast<void*>(data), size);
   }
 
+  void SetEncoding(size_t inline_info_size,
+                   size_t dex_register_map_size,
+                   size_t dex_pc_max,
+                   size_t native_pc_max) {
+    if (inline_info_size != 0) {
+      region_.StoreBit(kHasInlineInfoBitOffset, 1);
+      region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
+          // + 1 to also encode kNoInlineInfo: if an inline info offset
+          // is at 0xFF, we want to overflow to a larger encoding, because it will
+          // conflict with kNoInlineInfo.
+          // The offset is relative to the dex register map. TODO: Change this.
+          inline_info_size + dex_register_map_size + 1));
+    } else {
+      region_.StoreBit(kHasInlineInfoBitOffset, 0);
+      region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
+    }
+    region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
+                     // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
+                     // is at 0xFF, we want to overflow to a larger encoding, because it will
+                     // conflict with kNoDexRegisterMap.
+                     IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
+    region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
+    region_.StoreBit(kHasSmallNativePcBitOffset,
+                     IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
+  }
+
+  bool HasInlineInfo() const {
+    return region_.LoadBit(kHasInlineInfoBitOffset);
+  }
+
+  bool HasSmallInlineInfo() const {
+    return region_.LoadBit(kHasSmallInlineInfoBitOffset);
+  }
+
+  bool HasSmallDexRegisterMap() const {
+    return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
+  }
+
+  bool HasSmallNativePc() const {
+    return region_.LoadBit(kHasSmallNativePcBitOffset);
+  }
+
+  bool HasSmallDexPc() const {
+    return region_.LoadBit(kHasSmallDexPcBitOffset);
+  }
+
+  size_t ComputeStackMapRegisterMaskOffset() const {
+    return StackMap::kRegisterMaskOffset;
+  }
+
+  size_t ComputeStackMapStackMaskOffset() const {
+    return StackMap::kStackMaskOffset;
+  }
+
+  size_t ComputeStackMapDexPcOffset() const {
+    return ComputeStackMapStackMaskOffset() + GetStackMaskSize();
+  }
+
+  size_t ComputeStackMapNativePcOffset() const {
+    return ComputeStackMapDexPcOffset()
+        + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
+  }
+
+  size_t ComputeStackMapDexRegisterMapOffset() const {
+    return ComputeStackMapNativePcOffset()
+        + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
+  }
+
+  size_t ComputeStackMapInlineInfoOffset() const {
+    CHECK(HasInlineInfo());
+    return ComputeStackMapDexRegisterMapOffset()
+        + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
+  }
+
   StackMap GetStackMapAt(size_t i) const {
     size_t size = StackMapSize();
     return StackMap(GetStackMaps().Subregion(i * size, size));
@@ -658,7 +731,12 @@
   // Get the size of one stack map of this CodeInfo object, in bytes.
   // All stack maps of a CodeInfo have the same size.
   size_t StackMapSize() const {
-    return StackMap::ComputeStackMapSize(GetStackMaskSize());
+    return StackMap::ComputeStackMapSize(GetStackMaskSize(),
+                                         HasInlineInfo(),
+                                         HasSmallInlineInfo(),
+                                         HasSmallDexRegisterMap(),
+                                         HasSmallDexPc(),
+                                         HasSmallNativePc());
   }
 
   // Get the size of all the stack maps of this CodeInfo object, in bytes.
@@ -666,21 +744,25 @@
     return StackMapSize() * GetNumberOfStackMaps();
   }
 
+  size_t GetDexRegisterMapsOffset() const {
+    return CodeInfo::kFixedSize + StackMapsSize();
+  }
+
   uint32_t GetStackMapsOffset() const {
     return kFixedSize;
   }
 
   DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
-    DCHECK(stack_map.HasDexRegisterMap());
-    uint32_t offset = stack_map.GetDexRegisterMapOffset();
+    DCHECK(stack_map.HasDexRegisterMap(*this));
+    uint32_t offset = stack_map.GetDexRegisterMapOffset(*this) + GetDexRegisterMapsOffset();
     size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
     return DexRegisterMap(region_.Subregion(offset, size));
   }
 
   InlineInfo GetInlineInfoOf(StackMap stack_map) const {
-    DCHECK(stack_map.HasInlineInfo());
-    uint32_t offset = stack_map.GetInlineDescriptorOffset();
-    uint8_t depth = region_.Load<uint8_t>(offset);
+    DCHECK(stack_map.HasInlineInfo(*this));
+    uint32_t offset = stack_map.GetInlineDescriptorOffset(*this) + GetDexRegisterMapsOffset();
+    uint8_t depth = region_.LoadUnaligned<uint8_t>(offset);
     return InlineInfo(region_.Subregion(offset,
         InlineInfo::kFixedSize + depth * InlineInfo::SingleEntrySize()));
   }
@@ -688,7 +770,7 @@
   StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
     for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
       StackMap stack_map = GetStackMapAt(i);
-      if (stack_map.GetDexPc() == dex_pc) {
+      if (stack_map.GetDexPc(*this) == dex_pc) {
         return stack_map;
       }
     }
@@ -700,7 +782,7 @@
     // TODO: stack maps are sorted by native pc, we can do a binary search.
     for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
       StackMap stack_map = GetStackMapAt(i);
-      if (stack_map.GetNativePcOffset() == native_pc_offset) {
+      if (stack_map.GetNativePcOffset(*this) == native_pc_offset) {
         return stack_map;
       }
     }
@@ -708,14 +790,24 @@
     UNREACHABLE();
   }
 
+  void Dump(std::ostream& os, uint16_t number_of_dex_registers) const;
+  void DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const;
+
  private:
   // TODO: Instead of plain types such as "uint32_t", introduce
   // typedefs (and document the memory layout of CodeInfo).
   static constexpr int kOverallSizeOffset = 0;
-  static constexpr int kNumberOfStackMapsOffset = kOverallSizeOffset + sizeof(uint32_t);
+  static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
+  static constexpr int kNumberOfStackMapsOffset = kEncodingInfoOffset + sizeof(uint8_t);
   static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
   static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);
 
+  static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
+  static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
+  static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
+  static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
+  static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;
+
   MemoryRegion GetStackMaps() const {
     return region_.size() == 0
         ? MemoryRegion()
diff --git a/runtime/thread.cc b/runtime/thread.cc
index affb6cd..e1a07e9 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2155,8 +2155,9 @@
         Runtime* runtime = Runtime::Current();
         const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(m, sizeof(void*));
         uintptr_t native_pc_offset = m->NativeQuickPcOffset(GetCurrentQuickFramePc(), entry_point);
-        StackMap map = m->GetStackMap(native_pc_offset);
-        MemoryRegion mask = map.GetStackMask();
+        CodeInfo code_info = m->GetOptimizedCodeInfo();
+        StackMap map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
+        MemoryRegion mask = map.GetStackMask(code_info);
         // Visit stack entries that hold pointers.
         for (size_t i = 0; i < mask.size_in_bits(); ++i) {
           if (mask.LoadBit(i)) {
@@ -2173,7 +2174,7 @@
           }
         }
         // Visit callee-save registers that hold pointers.
-        uint32_t register_mask = map.GetRegisterMask();
+        uint32_t register_mask = map.GetRegisterMask(code_info);
         for (size_t i = 0; i < BitSizeOf<uint32_t>(); ++i) {
           if (register_mask & (1 << i)) {
             mirror::Object** ref_addr = reinterpret_cast<mirror::Object**>(GetGPRAddress(i));