Merge "ObjPtr<>-ify array allocations."
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index b358bfa..4791fa3 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -63,6 +63,7 @@
 #include "parallel_move_resolver.h"
 #include "scoped_thread_state_change-inl.h"
 #include "ssa_liveness_analysis.h"
+#include "stack_map.h"
 #include "stack_map_stream.h"
 #include "thread-current-inl.h"
 #include "utils/assembler.h"
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index b1dcb68..d99beac 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -16,15 +16,20 @@
 
 #include "stack_map_stream.h"
 
+#include <memory>
+
 #include "art_method-inl.h"
 #include "base/stl_util.h"
 #include "dex/dex_file_types.h"
 #include "optimizing/optimizing_compiler.h"
 #include "runtime.h"
 #include "scoped_thread_state_change-inl.h"
+#include "stack_map.h"
 
 namespace art {
 
+constexpr static bool kVerifyStackMaps = kIsDebugBuild;
+
 uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
   return StackMap::UnpackNativePc(stack_maps_[i].packed_native_pc, instruction_set_);
 }
@@ -36,518 +41,286 @@
 void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                         uint32_t native_pc_offset,
                                         uint32_t register_mask,
-                                        BitVector* sp_mask,
+                                        BitVector* stack_mask,
                                         uint32_t num_dex_registers,
                                         uint8_t inlining_depth) {
-  DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
-  current_entry_.dex_pc = dex_pc;
-  current_entry_.packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_);
-  current_entry_.register_mask = register_mask;
-  current_entry_.sp_mask = sp_mask;
-  current_entry_.inlining_depth = inlining_depth;
-  current_entry_.inline_infos_start_index = inline_infos_.size();
-  current_entry_.stack_mask_index = 0;
-  current_entry_.dex_method_index = dex::kDexNoIndex;
-  current_entry_.dex_register_entry.num_dex_registers = num_dex_registers;
-  current_entry_.dex_register_entry.locations_start_index = dex_register_locations_.size();
-  current_entry_.dex_register_entry.live_dex_registers_mask = nullptr;
-  if (num_dex_registers != 0u) {
-    current_entry_.dex_register_entry.live_dex_registers_mask =
-        ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
-    current_entry_.dex_register_entry.live_dex_registers_mask->ClearAllBits();
+  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
+  in_stack_map_ = true;
+
+  current_stack_map_ = StackMapEntry {
+    .packed_native_pc = StackMap::PackNativePc(native_pc_offset, instruction_set_),
+    .dex_pc = dex_pc,
+    .register_mask_index = kNoValue,
+    .stack_mask_index = kNoValue,
+    .inline_info_index = kNoValue,
+    .dex_register_mask_index = kNoValue,
+    .dex_register_map_index = kNoValue,
+  };
+  if (register_mask != 0) {
+    uint32_t shift = LeastSignificantBit(register_mask);
+    RegisterMaskEntry entry = { register_mask >> shift, shift };
+    current_stack_map_.register_mask_index = register_masks_.Dedup(&entry);
   }
-  current_dex_register_ = 0;
+  // The compiler expects the bit vector to be read only in PrepareForFillIn(),
+  // and may still modify its data before then. Therefore, just store the pointer.
+  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
+  lazy_stack_masks_.push_back(stack_mask);
+  current_inline_infos_ = 0;
+  current_dex_registers_.clear();
+  expected_num_dex_registers_ = num_dex_registers;
+
+  if (kVerifyStackMaps) {
+    size_t stack_map_index = stack_maps_.size();
+    // Create a lambda which will be executed at the very end to verify the data.
+    // Parameters and local variables will be captured (stored) by the lambda "[=]".
+    dchecks_.emplace_back([=](const CodeInfo& code_info) {
+      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
+      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), native_pc_offset);
+      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
+      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
+      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
+      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
+      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
+        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
+      }
+      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
+      if (inlining_depth != 0) {
+        CHECK_EQ(code_info.GetInlineInfoOf(stack_map).GetDepth(), inlining_depth);
+      }
+      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
+    });
+  }
 }
 
 void StackMapStream::EndStackMapEntry() {
-  current_entry_.dex_register_map_index = AddDexRegisterMapEntry(current_entry_.dex_register_entry);
-  stack_maps_.push_back(current_entry_);
-  current_entry_ = StackMapEntry();
+  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
+  in_stack_map_ = false;
+  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+
+  // Mark the last inline info as last in the list for the stack map.
+  if (current_inline_infos_ > 0) {
+    inline_infos_[inline_infos_.size() - 1].is_last = InlineInfo::kLast;
+  }
+
+  stack_maps_.Add(current_stack_map_);
 }
 
 void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
-  if (kind != DexRegisterLocation::Kind::kNone) {
-    // Ensure we only use non-compressed location kind at this stage.
-    DCHECK(DexRegisterLocation::IsShortLocationKind(kind)) << kind;
-    DexRegisterLocation location(kind, value);
+  current_dex_registers_.push_back(DexRegisterLocation(kind, value));
 
-    // Look for Dex register `location` in the location catalog (using the
-    // companion hash map of locations to indices).  Use its index if it
-    // is already in the location catalog.  If not, insert it (in the
-    // location catalog and the hash map) and use the newly created index.
-    auto it = location_catalog_entries_indices_.Find(location);
-    if (it != location_catalog_entries_indices_.end()) {
-      // Retrieve the index from the hash map.
-      dex_register_locations_.push_back(it->second);
-    } else {
-      // Create a new entry in the location catalog and the hash map.
-      size_t index = location_catalog_entries_.size();
-      location_catalog_entries_.push_back(location);
-      dex_register_locations_.push_back(index);
-      location_catalog_entries_indices_.Insert(std::make_pair(location, index));
-    }
-    DexRegisterMapEntry* const entry = in_inline_frame_
-        ? &current_inline_info_.dex_register_entry
-        : &current_entry_.dex_register_entry;
-    DCHECK_LT(current_dex_register_, entry->num_dex_registers);
-    entry->live_dex_registers_mask->SetBit(current_dex_register_);
-    entry->hash += (1 <<
-        (current_dex_register_ % (sizeof(DexRegisterMapEntry::hash) * kBitsPerByte)));
-    entry->hash += static_cast<uint32_t>(value);
-    entry->hash += static_cast<uint32_t>(kind);
+  // We have collected all the dex registers for StackMap/InlineInfo - create the map.
+  if (current_dex_registers_.size() == expected_num_dex_registers_) {
+    CreateDexRegisterMap();
   }
-  current_dex_register_++;
 }
 
 void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
-  current_entry_.invoke_type = invoke_type;
-  current_entry_.dex_method_index = dex_method_index;
+  uint32_t packed_native_pc = current_stack_map_.packed_native_pc;
+  size_t invoke_info_index = invoke_infos_.size();
+  invoke_infos_.Add(InvokeInfoEntry {
+    .packed_native_pc = packed_native_pc,
+    .invoke_type = invoke_type,
+    .method_info_index = method_infos_.Dedup(&dex_method_index),
+  });
+
+  if (kVerifyStackMaps) {
+    dchecks_.emplace_back([=](const CodeInfo& code_info) {
+      InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index);
+      CHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
+               StackMap::UnpackNativePc(packed_native_pc, instruction_set_));
+      CHECK_EQ(invoke_info.GetInvokeType(), invoke_type);
+      CHECK_EQ(method_infos_[invoke_info.GetMethodIndexIdx()], dex_method_index);
+    });
+  }
 }
 
 void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                           uint32_t dex_pc,
                                           uint32_t num_dex_registers,
                                           const DexFile* outer_dex_file) {
-  DCHECK(!in_inline_frame_);
-  in_inline_frame_ = true;
+  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
+  in_inline_info_ = true;
+  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
+
+  InlineInfoEntry entry = {
+    .is_last = InlineInfo::kMore,
+    .dex_pc = dex_pc,
+    .method_info_index = kNoValue,
+    .art_method_hi = kNoValue,
+    .art_method_lo = kNoValue,
+    .dex_register_mask_index = kNoValue,
+    .dex_register_map_index = kNoValue,
+  };
   if (EncodeArtMethodInInlineInfo(method)) {
-    current_inline_info_.method = method;
+    entry.art_method_hi = High32Bits(reinterpret_cast<uintptr_t>(method));
+    entry.art_method_lo = Low32Bits(reinterpret_cast<uintptr_t>(method));
   } else {
     if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
       ScopedObjectAccess soa(Thread::Current());
       DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
     }
-    current_inline_info_.method_index = method->GetDexMethodIndexUnchecked();
+    uint32_t dex_method_index = method->GetDexMethodIndexUnchecked();
+    entry.method_info_index = method_infos_.Dedup(&dex_method_index);
   }
-  current_inline_info_.dex_pc = dex_pc;
-  current_inline_info_.dex_register_entry.num_dex_registers = num_dex_registers;
-  current_inline_info_.dex_register_entry.locations_start_index = dex_register_locations_.size();
-  current_inline_info_.dex_register_entry.live_dex_registers_mask = nullptr;
-  if (num_dex_registers != 0) {
-    current_inline_info_.dex_register_entry.live_dex_registers_mask =
-        ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
-    current_inline_info_.dex_register_entry.live_dex_registers_mask->ClearAllBits();
+  if (current_inline_infos_++ == 0) {
+    current_stack_map_.inline_info_index = inline_infos_.size();
   }
-  current_dex_register_ = 0;
+  inline_infos_.Add(entry);
+
+  current_dex_registers_.clear();
+  expected_num_dex_registers_ = num_dex_registers;
+
+  if (kVerifyStackMaps) {
+    size_t stack_map_index = stack_maps_.size();
+    size_t depth = current_inline_infos_ - 1;
+    dchecks_.emplace_back([=](const CodeInfo& code_info) {
+      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
+      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+      CHECK_EQ(inline_info.GetDexPcAtDepth(depth), dex_pc);
+      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
+      CHECK_EQ(inline_info.EncodesArtMethodAtDepth(depth), encode_art_method);
+      if (encode_art_method) {
+        CHECK_EQ(inline_info.GetArtMethodAtDepth(depth), method);
+      } else {
+        CHECK_EQ(method_infos_[inline_info.GetMethodIndexIdxAtDepth(depth)],
+                 method->GetDexMethodIndexUnchecked());
+      }
+      CHECK_EQ(inline_info.HasDexRegisterMapAtDepth(depth), (num_dex_registers != 0));
+    });
+  }
 }
 
 void StackMapStream::EndInlineInfoEntry() {
-  current_inline_info_.dex_register_map_index =
-      AddDexRegisterMapEntry(current_inline_info_.dex_register_entry);
-  DCHECK(in_inline_frame_);
-  DCHECK_EQ(current_dex_register_, current_inline_info_.dex_register_entry.num_dex_registers)
-      << "Inline information contains less registers than expected";
-  in_inline_frame_ = false;
-  inline_infos_.push_back(current_inline_info_);
-  current_inline_info_ = InlineInfoEntry();
+  DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
+  in_inline_info_ = false;
+  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
 }
 
-size_t StackMapStream::ComputeDexRegisterLocationCatalogSize() const {
-  size_t size = DexRegisterLocationCatalog::kFixedSize;
-  for (const DexRegisterLocation& dex_register_location : location_catalog_entries_) {
-    size += DexRegisterLocationCatalog::EntrySize(dex_register_location);
+// Create the dex register map (bitmap + indices + catalog entries)
+// based on the currently accumulated list of DexRegisterLocations.
+void StackMapStream::CreateDexRegisterMap() {
+  // Create mask and map based on current registers.
+  temp_dex_register_mask_.ClearAllBits();
+  temp_dex_register_map_.clear();
+  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
+    DexRegisterLocation reg = current_dex_registers_[i];
+    if (reg.IsLive()) {
+      DexRegisterEntry entry = DexRegisterEntry {
+        .kind = static_cast<uint32_t>(reg.GetKind()),
+        .packed_value = DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue()),
+      };
+      temp_dex_register_mask_.SetBit(i);
+      temp_dex_register_map_.push_back(dex_register_catalog_.Dedup(&entry));
+    }
   }
-  return size;
-}
 
-size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) const {
-  // For num_dex_registers == 0u live_dex_registers_mask may be null.
-  if (num_dex_registers == 0u) {
-    return 0u;  // No register map will be emitted.
+  // Set the mask and map for the current StackMap/InlineInfo.
+  uint32_t mask_index = StackMap::kNoValue;  // Represents a mask with all zero bits.
+  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
+    mask_index = dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
+                                           temp_dex_register_mask_.GetNumberOfBits());
   }
-  size_t number_of_live_dex_registers = live_dex_registers_mask->NumSetBits();
-  if (live_dex_registers_mask->NumSetBits() == 0) {
-    return 0u;  // No register map will be emitted.
+  uint32_t map_index = dex_register_maps_.Dedup(temp_dex_register_map_.data(),
+                                                temp_dex_register_map_.size());
+  if (current_inline_infos_ > 0) {
+    inline_infos_[inline_infos_.size() - 1].dex_register_mask_index = mask_index;
+    inline_infos_[inline_infos_.size() - 1].dex_register_map_index = map_index;
+  } else {
+    current_stack_map_.dex_register_mask_index = mask_index;
+    current_stack_map_.dex_register_map_index = map_index;
   }
-  DCHECK(live_dex_registers_mask != nullptr);
 
-  // Size of the map in bytes.
-  size_t size = DexRegisterMap::kFixedSize;
-  // Add the live bit mask for the Dex register liveness.
-  size += DexRegisterMap::GetLiveBitMaskSize(num_dex_registers);
-  // Compute the size of the set of live Dex register entries.
-  size_t map_entries_size_in_bits =
-      DexRegisterMap::SingleEntrySizeInBits(catalog_size) * number_of_live_dex_registers;
-  size_t map_entries_size_in_bytes =
-      RoundUp(map_entries_size_in_bits, kBitsPerByte) / kBitsPerByte;
-  size += map_entries_size_in_bytes;
-  return size;
+  if (kVerifyStackMaps) {
+    size_t stack_map_index = stack_maps_.size();
+    int32_t depth = current_inline_infos_ - 1;
+    // We need to make a copy of the current registers for later (when the check is run).
+    auto expected_dex_registers = std::make_shared<std::vector<DexRegisterLocation>>(
+        current_dex_registers_.begin(), current_dex_registers_.end());
+    dchecks_.emplace_back([=](const CodeInfo& code_info) {
+      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
+      size_t num_dex_registers = expected_dex_registers->size();
+      DexRegisterMap map = (depth == -1)
+        ? code_info.GetDexRegisterMapOf(stack_map, num_dex_registers)
+        : code_info.GetDexRegisterMapAtDepth(depth,
+                                             code_info.GetInlineInfoOf(stack_map),
+                                             num_dex_registers);
+      CHECK_EQ(map.size(), num_dex_registers);
+      for (size_t r = 0; r < num_dex_registers; r++) {
+        CHECK_EQ(expected_dex_registers->at(r), map.Get(r));
+      }
+    });
+  }
 }
 
 void StackMapStream::FillInMethodInfo(MemoryRegion region) {
   {
-    MethodInfo info(region.begin(), method_indices_.size());
-    for (size_t i = 0; i < method_indices_.size(); ++i) {
-      info.SetMethodIndex(i, method_indices_[i]);
+    MethodInfo info(region.begin(), method_infos_.size());
+    for (size_t i = 0; i < method_infos_.size(); ++i) {
+      info.SetMethodIndex(i, method_infos_[i]);
     }
   }
-  if (kIsDebugBuild) {
+  if (kVerifyStackMaps) {
     // Check the data matches.
     MethodInfo info(region.begin());
     const size_t count = info.NumMethodIndices();
-    DCHECK_EQ(count, method_indices_.size());
+    DCHECK_EQ(count, method_infos_.size());
     for (size_t i = 0; i < count; ++i) {
-      DCHECK_EQ(info.GetMethodIndex(i), method_indices_[i]);
+      DCHECK_EQ(info.GetMethodIndex(i), method_infos_[i]);
     }
   }
 }
 
-template<typename Vector>
-static MemoryRegion EncodeMemoryRegion(Vector* out, size_t* bit_offset, uint32_t bit_length) {
-  uint32_t byte_length = BitsToBytesRoundUp(bit_length);
-  EncodeVarintBits(out, bit_offset, byte_length);
-  *bit_offset = RoundUp(*bit_offset, kBitsPerByte);
-  out->resize(out->size() + byte_length);
-  MemoryRegion region(out->data() + *bit_offset / kBitsPerByte, byte_length);
-  *bit_offset += kBitsPerByte * byte_length;
-  return region;
-}
-
 size_t StackMapStream::PrepareForFillIn() {
+  static_assert(sizeof(StackMapEntry) == StackMap::kCount * sizeof(uint32_t), "Layout");
+  static_assert(sizeof(InvokeInfoEntry) == InvokeInfo::kCount * sizeof(uint32_t), "Layout");
+  static_assert(sizeof(InlineInfoEntry) == InlineInfo::kCount * sizeof(uint32_t), "Layout");
+  static_assert(sizeof(DexRegisterEntry) == DexRegisterInfo::kCount * sizeof(uint32_t), "Layout");
+  DCHECK_EQ(out_.size(), 0u);
+
+  // Read the stack masks now. The compiler might have updated them.
+  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
+    BitVector* stack_mask = lazy_stack_masks_[i];
+    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
+      stack_maps_[i].stack_mask_index =
+        stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
+    }
+  }
+
   size_t bit_offset = 0;
-  out_.clear();
-
-  // Decide the offsets of dex register map entries, but do not write them out yet.
-  // Needs to be done first as it modifies the stack map entry.
-  size_t dex_register_map_bytes = 0;
-  for (DexRegisterMapEntry& entry : dex_register_entries_) {
-    size_t size = entry.ComputeSize(location_catalog_entries_.size());
-    entry.offset = size == 0 ? DexRegisterMapEntry::kOffsetUnassigned : dex_register_map_bytes;
-    dex_register_map_bytes += size;
-  }
-
-  // Must be done before calling ComputeInlineInfoEncoding since ComputeInlineInfoEncoding requires
-  // dex_method_index_idx to be filled in.
-  PrepareMethodIndices();
-
-  // Dedup stack masks. Needs to be done first as it modifies the stack map entry.
-  BitmapTableBuilder stack_mask_builder(allocator_);
-  for (StackMapEntry& stack_map : stack_maps_) {
-    BitVector* mask = stack_map.sp_mask;
-    size_t num_bits = (mask != nullptr) ? mask->GetNumberOfBits() : 0;
-    if (num_bits != 0) {
-      stack_map.stack_mask_index = stack_mask_builder.Dedup(mask->GetRawStorage(), num_bits);
-    } else {
-      stack_map.stack_mask_index = StackMap::kNoValue;
-    }
-  }
-
-  // Dedup register masks. Needs to be done first as it modifies the stack map entry.
-  BitTableBuilder<std::array<uint32_t, RegisterMask::kCount>> register_mask_builder(allocator_);
-  for (StackMapEntry& stack_map : stack_maps_) {
-    uint32_t register_mask = stack_map.register_mask;
-    if (register_mask != 0) {
-      uint32_t shift = LeastSignificantBit(register_mask);
-      std::array<uint32_t, RegisterMask::kCount> entry = {
-        register_mask >> shift,
-        shift,
-      };
-      stack_map.register_mask_index = register_mask_builder.Dedup(&entry);
-    } else {
-      stack_map.register_mask_index = StackMap::kNoValue;
-    }
-  }
-
-  // Allocate space for dex register maps.
-  EncodeMemoryRegion(&out_, &bit_offset, dex_register_map_bytes * kBitsPerByte);
-
-  // Write dex register catalog.
-  EncodeVarintBits(&out_, &bit_offset, location_catalog_entries_.size());
-  size_t location_catalog_bytes = ComputeDexRegisterLocationCatalogSize();
-  MemoryRegion dex_register_location_catalog_region =
-      EncodeMemoryRegion(&out_, &bit_offset, location_catalog_bytes * kBitsPerByte);
-  DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
-  // Offset in `dex_register_location_catalog` where to store the next
-  // register location.
-  size_t location_catalog_offset = DexRegisterLocationCatalog::kFixedSize;
-  for (DexRegisterLocation dex_register_location : location_catalog_entries_) {
-    dex_register_location_catalog.SetRegisterInfo(location_catalog_offset, dex_register_location);
-    location_catalog_offset += DexRegisterLocationCatalog::EntrySize(dex_register_location);
-  }
-  // Ensure we reached the end of the Dex registers location_catalog.
-  DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());
-
-  // Write stack maps.
-  BitTableBuilder<std::array<uint32_t, StackMap::kCount>> stack_map_builder(allocator_);
-  BitTableBuilder<std::array<uint32_t, InvokeInfo::kCount>> invoke_info_builder(allocator_);
-  BitTableBuilder<std::array<uint32_t, InlineInfo::kCount>> inline_info_builder(allocator_);
-  for (const StackMapEntry& entry : stack_maps_) {
-    if (entry.dex_method_index != dex::kDexNoIndex) {
-      std::array<uint32_t, InvokeInfo::kCount> invoke_info_entry {
-          entry.packed_native_pc,
-          entry.invoke_type,
-          entry.dex_method_index_idx
-      };
-      invoke_info_builder.Add(invoke_info_entry);
-    }
-
-    // Set the inlining info.
-    uint32_t inline_info_index = inline_info_builder.size();
-    DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());
-    for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
-      InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
-      uint32_t method_index_idx = inline_entry.dex_method_index_idx;
-      uint32_t extra_data = 1;
-      if (inline_entry.method != nullptr) {
-        method_index_idx = High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method));
-        extra_data = Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method));
-      }
-      std::array<uint32_t, InlineInfo::kCount> inline_info_entry {
-          (depth == entry.inlining_depth - 1) ? InlineInfo::kLast : InlineInfo::kMore,
-          method_index_idx,
-          inline_entry.dex_pc,
-          extra_data,
-          dex_register_entries_[inline_entry.dex_register_map_index].offset,
-      };
-      inline_info_builder.Add(inline_info_entry);
-    }
-    std::array<uint32_t, StackMap::kCount> stack_map_entry {
-        entry.packed_native_pc,
-        entry.dex_pc,
-        dex_register_entries_[entry.dex_register_map_index].offset,
-        entry.inlining_depth != 0 ? inline_info_index : InlineInfo::kNoValue,
-        entry.register_mask_index,
-        entry.stack_mask_index,
-    };
-    stack_map_builder.Add(stack_map_entry);
-  }
-  stack_map_builder.Encode(&out_, &bit_offset);
-  invoke_info_builder.Encode(&out_, &bit_offset);
-  inline_info_builder.Encode(&out_, &bit_offset);
-  register_mask_builder.Encode(&out_, &bit_offset);
-  stack_mask_builder.Encode(&out_, &bit_offset);
+  stack_maps_.Encode(&out_, &bit_offset);
+  register_masks_.Encode(&out_, &bit_offset);
+  stack_masks_.Encode(&out_, &bit_offset);
+  invoke_infos_.Encode(&out_, &bit_offset);
+  inline_infos_.Encode(&out_, &bit_offset);
+  dex_register_masks_.Encode(&out_, &bit_offset);
+  dex_register_maps_.Encode(&out_, &bit_offset);
+  dex_register_catalog_.Encode(&out_, &bit_offset);
 
   return UnsignedLeb128Size(out_.size()) +  out_.size();
 }
 
 void StackMapStream::FillInCodeInfo(MemoryRegion region) {
-  DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
+  DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
+  DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";
   DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before FillIn";
   DCHECK_EQ(region.size(), UnsignedLeb128Size(out_.size()) +  out_.size());
 
   uint8_t* ptr = EncodeUnsignedLeb128(region.begin(), out_.size());
   region.CopyFromVector(ptr - region.begin(), out_);
 
-  // Write dex register maps.
-  CodeInfo code_info(region);
-  for (DexRegisterMapEntry& entry : dex_register_entries_) {
-    size_t entry_size = entry.ComputeSize(location_catalog_entries_.size());
-    if (entry_size != 0) {
-      DexRegisterMap dex_register_map(
-          code_info.dex_register_maps_.Subregion(entry.offset, entry_size),
-          entry.num_dex_registers,
-          code_info);
-      FillInDexRegisterMap(dex_register_map,
-                           entry.num_dex_registers,
-                           *entry.live_dex_registers_mask,
-                           entry.locations_start_index);
-    }
-  }
-
-  // Verify all written data in debug build.
-  if (kIsDebugBuild) {
-    CheckCodeInfo(region);
-  }
-}
-
-void StackMapStream::FillInDexRegisterMap(DexRegisterMap dex_register_map,
-                                          uint32_t num_dex_registers,
-                                          const BitVector& live_dex_registers_mask,
-                                          uint32_t start_index_in_dex_register_locations) const {
-  dex_register_map.SetLiveBitMask(num_dex_registers, live_dex_registers_mask);
-  // Set the dex register location mapping data.
-  size_t number_of_live_dex_registers = live_dex_registers_mask.NumSetBits();
-  DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
-  DCHECK_LE(start_index_in_dex_register_locations,
-            dex_register_locations_.size() - number_of_live_dex_registers);
-  for (size_t index_in_dex_register_locations = 0;
-      index_in_dex_register_locations != number_of_live_dex_registers;
-       ++index_in_dex_register_locations) {
-    size_t location_catalog_entry_index = dex_register_locations_[
-        start_index_in_dex_register_locations + index_in_dex_register_locations];
-    dex_register_map.SetLocationCatalogEntryIndex(
-        index_in_dex_register_locations,
-        location_catalog_entry_index,
-        location_catalog_entries_.size());
-  }
-}
-
-size_t StackMapStream::AddDexRegisterMapEntry(const DexRegisterMapEntry& entry) {
-  const size_t current_entry_index = dex_register_entries_.size();
-  auto entries_it = dex_map_hash_to_stack_map_indices_.find(entry.hash);
-  if (entries_it == dex_map_hash_to_stack_map_indices_.end()) {
-    // We don't have a perfect hash functions so we need a list to collect all stack maps
-    // which might have the same dex register map.
-    ScopedArenaVector<uint32_t> stack_map_indices(allocator_->Adapter(kArenaAllocStackMapStream));
-    stack_map_indices.push_back(current_entry_index);
-    dex_map_hash_to_stack_map_indices_.Put(entry.hash, std::move(stack_map_indices));
-  } else {
-    // We might have collisions, so we need to check whether or not we really have a match.
-    for (uint32_t test_entry_index : entries_it->second) {
-      if (DexRegisterMapEntryEquals(dex_register_entries_[test_entry_index], entry)) {
-        return test_entry_index;
-      }
-    }
-    entries_it->second.push_back(current_entry_index);
-  }
-  dex_register_entries_.push_back(entry);
-  return current_entry_index;
-}
-
-bool StackMapStream::DexRegisterMapEntryEquals(const DexRegisterMapEntry& a,
-                                               const DexRegisterMapEntry& b) const {
-  if ((a.live_dex_registers_mask == nullptr) != (b.live_dex_registers_mask == nullptr)) {
-    return false;
-  }
-  if (a.num_dex_registers != b.num_dex_registers) {
-    return false;
-  }
-  if (a.num_dex_registers != 0u) {
-    DCHECK(a.live_dex_registers_mask != nullptr);
-    DCHECK(b.live_dex_registers_mask != nullptr);
-    if (!a.live_dex_registers_mask->Equal(b.live_dex_registers_mask)) {
-      return false;
-    }
-    size_t number_of_live_dex_registers = a.live_dex_registers_mask->NumSetBits();
-    DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
-    DCHECK_LE(a.locations_start_index,
-              dex_register_locations_.size() - number_of_live_dex_registers);
-    DCHECK_LE(b.locations_start_index,
-              dex_register_locations_.size() - number_of_live_dex_registers);
-    auto a_begin = dex_register_locations_.begin() + a.locations_start_index;
-    auto b_begin = dex_register_locations_.begin() + b.locations_start_index;
-    if (!std::equal(a_begin, a_begin + number_of_live_dex_registers, b_begin)) {
-      return false;
-    }
-  }
-  return true;
-}
-
-// Helper for CheckCodeInfo - check that register map has the expected content.
-void StackMapStream::CheckDexRegisterMap(const DexRegisterMap& dex_register_map,
-                                         size_t num_dex_registers,
-                                         BitVector* live_dex_registers_mask,
-                                         size_t dex_register_locations_index) const {
-  for (size_t reg = 0; reg < num_dex_registers; reg++) {
-    // Find the location we tried to encode.
-    DexRegisterLocation expected = DexRegisterLocation::None();
-    if (live_dex_registers_mask->IsBitSet(reg)) {
-      size_t catalog_index = dex_register_locations_[dex_register_locations_index++];
-      expected = location_catalog_entries_[catalog_index];
-    }
-    // Compare to the seen location.
-    if (expected.GetKind() == DexRegisterLocation::Kind::kNone) {
-      DCHECK(!dex_register_map.IsValid() || !dex_register_map.IsDexRegisterLive(reg))
-          << dex_register_map.IsValid() << " " << dex_register_map.IsDexRegisterLive(reg);
-    } else {
-      DCHECK(dex_register_map.IsDexRegisterLive(reg));
-      DexRegisterLocation seen = dex_register_map.GetDexRegisterLocation(reg);
-      DCHECK_EQ(expected.GetKind(), seen.GetKind());
-      DCHECK_EQ(expected.GetValue(), seen.GetValue());
-    }
-  }
-  if (num_dex_registers == 0) {
-    DCHECK(!dex_register_map.IsValid());
-  }
-}
-
-void StackMapStream::PrepareMethodIndices() {
-  CHECK(method_indices_.empty());
-  method_indices_.resize(stack_maps_.size() + inline_infos_.size());
-  ScopedArenaUnorderedMap<uint32_t, size_t> dedupe(allocator_->Adapter(kArenaAllocStackMapStream));
-  for (StackMapEntry& stack_map : stack_maps_) {
-    const size_t index = dedupe.size();
-    const uint32_t method_index = stack_map.dex_method_index;
-    if (method_index != dex::kDexNoIndex) {
-      stack_map.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
-      method_indices_[index] = method_index;
-    }
-  }
-  for (InlineInfoEntry& inline_info : inline_infos_) {
-    const size_t index = dedupe.size();
-    const uint32_t method_index = inline_info.method_index;
-    CHECK_NE(method_index, dex::kDexNoIndex);
-    inline_info.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
-    method_indices_[index] = method_index;
-  }
-  method_indices_.resize(dedupe.size());
-}
-
-// Check that all StackMapStream inputs are correctly encoded by trying to read them back.
-void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
-  CodeInfo code_info(region);
-  DCHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
-  DCHECK_EQ(code_info.GetNumberOfLocationCatalogEntries(), location_catalog_entries_.size());
-  size_t invoke_info_index = 0;
-  for (size_t s = 0; s < stack_maps_.size(); ++s) {
-    const StackMap stack_map = code_info.GetStackMapAt(s);
-    StackMapEntry entry = stack_maps_[s];
-
-    // Check main stack map fields.
-    DCHECK_EQ(stack_map.GetNativePcOffset(instruction_set_),
-              StackMap::UnpackNativePc(entry.packed_native_pc, instruction_set_));
-    DCHECK_EQ(stack_map.GetDexPc(), entry.dex_pc);
-    DCHECK_EQ(stack_map.GetRegisterMaskIndex(), entry.register_mask_index);
-    DCHECK_EQ(code_info.GetRegisterMaskOf(stack_map), entry.register_mask);
-    DCHECK_EQ(stack_map.GetStackMaskIndex(), entry.stack_mask_index);
-    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
-    if (entry.sp_mask != nullptr) {
-      DCHECK_GE(stack_mask.size_in_bits(), entry.sp_mask->GetNumberOfBits());
-      for (size_t b = 0; b < stack_mask.size_in_bits(); b++) {
-        DCHECK_EQ(stack_mask.LoadBit(b), entry.sp_mask->IsBitSet(b)) << b;
-      }
-    } else {
-      DCHECK_EQ(stack_mask.size_in_bits(), 0u);
-    }
-    if (entry.dex_method_index != dex::kDexNoIndex) {
-      InvokeInfo invoke_info = code_info.GetInvokeInfo(invoke_info_index);
-      DCHECK_EQ(invoke_info.GetNativePcOffset(instruction_set_),
-                StackMap::UnpackNativePc(entry.packed_native_pc, instruction_set_));
-      DCHECK_EQ(invoke_info.GetInvokeType(), entry.invoke_type);
-      DCHECK_EQ(invoke_info.GetMethodIndexIdx(), entry.dex_method_index_idx);
-      invoke_info_index++;
-    }
-    CheckDexRegisterMap(code_info.GetDexRegisterMapOf(
-                            stack_map, entry.dex_register_entry.num_dex_registers),
-                        entry.dex_register_entry.num_dex_registers,
-                        entry.dex_register_entry.live_dex_registers_mask,
-                        entry.dex_register_entry.locations_start_index);
-
-    // Check inline info.
-    DCHECK_EQ(stack_map.HasInlineInfo(), (entry.inlining_depth != 0));
-    if (entry.inlining_depth != 0) {
-      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
-      DCHECK_EQ(inline_info.GetDepth(), entry.inlining_depth);
-      for (size_t d = 0; d < entry.inlining_depth; ++d) {
-        size_t inline_info_index = entry.inline_infos_start_index + d;
-        DCHECK_LT(inline_info_index, inline_infos_.size());
-        InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
-        DCHECK_EQ(inline_info.GetDexPcAtDepth(d), inline_entry.dex_pc);
-        if (inline_info.EncodesArtMethodAtDepth(d)) {
-          DCHECK_EQ(inline_info.GetArtMethodAtDepth(d),
-                    inline_entry.method);
-        } else {
-          const size_t method_index_idx =
-              inline_info.GetMethodIndexIdxAtDepth(d);
-          DCHECK_EQ(method_index_idx, inline_entry.dex_method_index_idx);
-          DCHECK_EQ(method_indices_[method_index_idx], inline_entry.method_index);
-        }
-
-        CheckDexRegisterMap(code_info.GetDexRegisterMapAtDepth(
-                                d,
-                                inline_info,
-                                inline_entry.dex_register_entry.num_dex_registers),
-                            inline_entry.dex_register_entry.num_dex_registers,
-                            inline_entry.dex_register_entry.live_dex_registers_mask,
-                            inline_entry.dex_register_entry.locations_start_index);
-      }
+  // Verify all written data (usually only in debug builds).
+  if (kVerifyStackMaps) {
+    CodeInfo code_info(region);
+    CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
+    for (const auto& dcheck : dchecks_) {
+      dcheck(code_info);
     }
   }
 }
 
 size_t StackMapStream::ComputeMethodInfoSize() const {
   DCHECK_NE(0u, out_.size()) << "PrepareForFillIn not called before " << __FUNCTION__;
-  return MethodInfo::ComputeSize(method_indices_.size());
+  return MethodInfo::ComputeSize(method_infos_.size());
 }
 
 }  // namespace art
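
The key mechanism introduced in stack_map_stream.cc above is deferred verification: each Begin*/Add* call captures its inputs in a lambda appended to dchecks_, and FillInCodeInfo() replays those lambdas against the decoded CodeInfo once everything has been encoded. Below is a minimal sketch of that pattern, using made-up placeholder types (Builder, DecodedInfo, DecodedMap) rather than the real ART classes; it only illustrates the capture-now/check-later idea.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <vector>

// Placeholder for the decoded form that the real CodeInfo reads back.
struct DecodedMap { uint32_t dex_pc; uint32_t native_pc; };
struct DecodedInfo {
  std::vector<DecodedMap> maps;
  const DecodedMap& GetStackMapAt(size_t i) const { return maps[i]; }
};

class Builder {
 public:
  void BeginStackMapEntry(uint32_t dex_pc, uint32_t native_pc) {
    size_t index = entries_.size();
    entries_.push_back({dex_pc, native_pc});
    // Capture the inputs by value now; the check runs only after encoding,
    // against whatever the decoder reads back at the same index.
    checks_.emplace_back([=](const DecodedInfo& info) {
      assert(info.GetStackMapAt(index).dex_pc == dex_pc);
      assert(info.GetStackMapAt(index).native_pc == native_pc);
    });
  }
  void VerifyAgainst(const DecodedInfo& info) const {
    for (const auto& check : checks_) {
      check(info);
    }
  }
 private:
  std::vector<DecodedMap> entries_;
  std::vector<std::function<void(const DecodedInfo&)>> checks_;
};

int main() {
  Builder builder;
  builder.BeginStackMapEntry(/*dex_pc=*/0, /*native_pc=*/64);
  DecodedInfo info;
  info.maps.push_back({0, 64});
  builder.VerifyAgainst(info);  // Passes; a mismatch would trip the captured assert.
  return 0;
}

Capturing by value with "[=]" means the checks compare the original inputs against what the decoder reads back, so an encoding bug surfaces as a failed check at fill-in time rather than as silent corruption.
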
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 6d505b9..c758bca 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -17,42 +17,20 @@
 #ifndef ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
 #define ART_COMPILER_OPTIMIZING_STACK_MAP_STREAM_H_
 
+#include "base/allocator.h"
+#include "base/arena_bit_vector.h"
+#include "base/bit_table.h"
 #include "base/bit_vector-inl.h"
-#include "base/hash_map.h"
 #include "base/memory_region.h"
 #include "base/scoped_arena_containers.h"
 #include "base/value_object.h"
+#include "dex_register_location.h"
 #include "method_info.h"
 #include "nodes.h"
-#include "stack_map.h"
 
 namespace art {
 
-// Helper to build art::StackMapStream::LocationCatalogEntriesIndices.
-class LocationCatalogEntriesIndicesEmptyFn {
- public:
-  void MakeEmpty(std::pair<DexRegisterLocation, size_t>& item) const {
-    item.first = DexRegisterLocation::None();
-  }
-  bool IsEmpty(const std::pair<DexRegisterLocation, size_t>& item) const {
-    return item.first == DexRegisterLocation::None();
-  }
-};
-
-// Hash function for art::StackMapStream::LocationCatalogEntriesIndices.
-// This hash function does not create collisions.
-class DexRegisterLocationHashFn {
- public:
-  size_t operator()(DexRegisterLocation key) const {
-    // Concatenate `key`s fields to create a 64-bit value to be hashed.
-    int64_t kind_and_value =
-        (static_cast<int64_t>(key.kind_) << 32) | static_cast<int64_t>(key.value_);
-    return inner_hash_fn_(kind_and_value);
-  }
- private:
-  std::hash<int64_t> inner_hash_fn_;
-};
-
+class CodeInfo;
 
 /**
  * Collects and builds stack maps for a method. All the stack maps
@@ -61,71 +39,26 @@
 class StackMapStream : public ValueObject {
  public:
   explicit StackMapStream(ScopedArenaAllocator* allocator, InstructionSet instruction_set)
-      : allocator_(allocator),
-        instruction_set_(instruction_set),
-        stack_maps_(allocator->Adapter(kArenaAllocStackMapStream)),
-        location_catalog_entries_(allocator->Adapter(kArenaAllocStackMapStream)),
-        location_catalog_entries_indices_(allocator->Adapter(kArenaAllocStackMapStream)),
-        dex_register_locations_(allocator->Adapter(kArenaAllocStackMapStream)),
-        inline_infos_(allocator->Adapter(kArenaAllocStackMapStream)),
-        method_indices_(allocator->Adapter(kArenaAllocStackMapStream)),
-        dex_register_entries_(allocator->Adapter(kArenaAllocStackMapStream)),
+      : instruction_set_(instruction_set),
+        stack_maps_(allocator),
+        register_masks_(allocator),
+        stack_masks_(allocator),
+        invoke_infos_(allocator),
+        inline_infos_(allocator),
+        dex_register_masks_(allocator),
+        dex_register_maps_(allocator),
+        dex_register_catalog_(allocator),
         out_(allocator->Adapter(kArenaAllocStackMapStream)),
-        dex_map_hash_to_stack_map_indices_(std::less<uint32_t>(),
-                                           allocator->Adapter(kArenaAllocStackMapStream)),
-        current_entry_(),
-        current_inline_info_(),
-        current_dex_register_(0),
-        in_inline_frame_(false) {
-    stack_maps_.reserve(10);
-    out_.reserve(64);
-    location_catalog_entries_.reserve(4);
-    dex_register_locations_.reserve(10 * 4);
-    inline_infos_.reserve(2);
+        method_infos_(allocator),
+        lazy_stack_masks_(allocator->Adapter(kArenaAllocStackMapStream)),
+        in_stack_map_(false),
+        in_inline_info_(false),
+        current_inline_infos_(0),
+        current_dex_registers_(allocator->Adapter(kArenaAllocStackMapStream)),
+        temp_dex_register_mask_(allocator, 32, true, kArenaAllocStackMapStream),
+        temp_dex_register_map_(allocator->Adapter(kArenaAllocStackMapStream)) {
   }
 
-  // A dex register map entry for a single stack map entry, contains what registers are live as
-  // well as indices into the location catalog.
-  class DexRegisterMapEntry {
-   public:
-    static const uint32_t kOffsetUnassigned = -1;
-
-    BitVector* live_dex_registers_mask;
-    uint32_t num_dex_registers;
-    size_t locations_start_index;
-    // Computed fields
-    size_t hash = 0;
-    uint32_t offset = kOffsetUnassigned;
-
-    size_t ComputeSize(size_t catalog_size) const;
-  };
-
-  // See runtime/stack_map.h to know what these fields contain.
-  struct StackMapEntry {
-    uint32_t dex_pc;
-    uint32_t packed_native_pc;
-    uint32_t register_mask;
-    BitVector* sp_mask;
-    uint32_t inlining_depth;
-    size_t inline_infos_start_index;
-    uint32_t stack_mask_index;
-    uint32_t register_mask_index;
-    DexRegisterMapEntry dex_register_entry;
-    size_t dex_register_map_index;
-    InvokeType invoke_type;
-    uint32_t dex_method_index;
-    uint32_t dex_method_index_idx;  // Index into dex method index table.
-  };
-
-  struct InlineInfoEntry {
-    uint32_t dex_pc;  // dex::kDexNoIndex for intrinsified native methods.
-    ArtMethod* method;
-    uint32_t method_index;
-    DexRegisterMapEntry dex_register_entry;
-    size_t dex_register_map_index;
-    uint32_t dex_method_index_idx;  // Index into the dex method index table.
-  };
-
   void BeginStackMapEntry(uint32_t dex_pc,
                           uint32_t native_pc_offset,
                           uint32_t register_mask,
@@ -160,58 +93,82 @@
   size_t ComputeMethodInfoSize() const;
 
  private:
-  size_t ComputeDexRegisterLocationCatalogSize() const;
+  static constexpr uint32_t kNoValue = -1;
 
-  // Prepare and deduplicate method indices.
-  void PrepareMethodIndices();
+  // The fields must be uint32_t and mirror the StackMap accessor in stack_map.h!
+  struct StackMapEntry {
+    uint32_t packed_native_pc;
+    uint32_t dex_pc;
+    uint32_t register_mask_index;
+    uint32_t stack_mask_index;
+    uint32_t inline_info_index;
+    uint32_t dex_register_mask_index;
+    uint32_t dex_register_map_index;
+  };
 
-  // Deduplicate entry if possible and return the corresponding index into dex_register_entries_
-  // array. If entry is not a duplicate, a new entry is added to dex_register_entries_.
-  size_t AddDexRegisterMapEntry(const DexRegisterMapEntry& entry);
+  // The fields must be uint32_t and mirror the InlineInfo accessor in stack_map.h!
+  struct InlineInfoEntry {
+    uint32_t is_last;
+    uint32_t dex_pc;
+    uint32_t method_info_index;
+    uint32_t art_method_hi;
+    uint32_t art_method_lo;
+    uint32_t dex_register_mask_index;
+    uint32_t dex_register_map_index;
+  };
 
-  // Return true if the two dex register map entries are equal.
-  bool DexRegisterMapEntryEquals(const DexRegisterMapEntry& a, const DexRegisterMapEntry& b) const;
+  // The fields must be uint32_t and mirror the InvokeInfo accessor in stack_map.h!
+  struct InvokeInfoEntry {
+    uint32_t packed_native_pc;
+    uint32_t invoke_type;
+    uint32_t method_info_index;
+  };
 
-  // Fill in the corresponding entries of a register map.
-  void FillInDexRegisterMap(DexRegisterMap dex_register_map,
-                            uint32_t num_dex_registers,
-                            const BitVector& live_dex_registers_mask,
-                            uint32_t start_index_in_dex_register_locations) const;
+  // The fields must be uint32_t and mirror the DexRegisterInfo accessor in stack_map.h!
+  struct DexRegisterEntry {
+    uint32_t kind;
+    uint32_t packed_value;
+  };
 
-  void CheckDexRegisterMap(const DexRegisterMap& dex_register_map,
-                           size_t num_dex_registers,
-                           BitVector* live_dex_registers_mask,
-                           size_t dex_register_locations_index) const;
-  void CheckCodeInfo(MemoryRegion region) const;
+  // The fields must be uint32_t and mirror the RegisterMask accessor in stack_map.h!
+  struct RegisterMaskEntry {
+    uint32_t value;
+    uint32_t shift;
+  };
 
-  ScopedArenaAllocator* const allocator_;
+  void CreateDexRegisterMap();
+
   const InstructionSet instruction_set_;
-  ScopedArenaVector<StackMapEntry> stack_maps_;
-
-  // A catalog of unique [location_kind, register_value] pairs (per method).
-  ScopedArenaVector<DexRegisterLocation> location_catalog_entries_;
-  // Map from Dex register location catalog entries to their indices in the
-  // location catalog.
-  using LocationCatalogEntriesIndices = ScopedArenaHashMap<DexRegisterLocation,
-                                                           size_t,
-                                                           LocationCatalogEntriesIndicesEmptyFn,
-                                                           DexRegisterLocationHashFn>;
-  LocationCatalogEntriesIndices location_catalog_entries_indices_;
-
-  // A set of concatenated maps of Dex register locations indices to `location_catalog_entries_`.
-  ScopedArenaVector<size_t> dex_register_locations_;
-  ScopedArenaVector<InlineInfoEntry> inline_infos_;
-  ScopedArenaVector<uint32_t> method_indices_;
-  ScopedArenaVector<DexRegisterMapEntry> dex_register_entries_;
-
+  BitTableBuilder<StackMapEntry> stack_maps_;
+  BitTableBuilder<RegisterMaskEntry> register_masks_;
+  BitmapTableBuilder stack_masks_;
+  BitTableBuilder<InvokeInfoEntry> invoke_infos_;
+  BitTableBuilder<InlineInfoEntry> inline_infos_;
+  BitmapTableBuilder dex_register_masks_;
+  BitTableBuilder<uint32_t> dex_register_maps_;
+  BitTableBuilder<DexRegisterEntry> dex_register_catalog_;
   ScopedArenaVector<uint8_t> out_;
 
-  ScopedArenaSafeMap<uint32_t, ScopedArenaVector<uint32_t>> dex_map_hash_to_stack_map_indices_;
+  BitTableBuilder<uint32_t> method_infos_;
 
-  StackMapEntry current_entry_;
-  InlineInfoEntry current_inline_info_;
-  uint32_t current_dex_register_;
-  bool in_inline_frame_;
+  ScopedArenaVector<BitVector*> lazy_stack_masks_;
+
+  // Variables which track the current state between Begin/End calls.
+  bool in_stack_map_;
+  bool in_inline_info_;
+  StackMapEntry current_stack_map_;
+  uint32_t current_inline_infos_;
+  ScopedArenaVector<DexRegisterLocation> current_dex_registers_;
+  size_t expected_num_dex_registers_;
+
+  // Temporary variables used in CreateDexRegisterMap.
+  // They are here so that we can reuse the reserved memory.
+  ArenaBitVector temp_dex_register_mask_;
+  ScopedArenaVector<uint32_t> temp_dex_register_map_;
+
+  // A set of lambda functions to be executed at the end to verify
+  // the encoded data. It is generally only used in debug builds.
+  std::vector<std::function<void(CodeInfo&)>> dchecks_;
 
   DISALLOW_COPY_AND_ASSIGN(StackMapStream);
 };
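
The header now models every emitted table as a builder over fixed uint32_t structs, and duplicate rows are collapsed with Dedup() instead of the removed hash-map machinery (LocationCatalogEntriesIndices, dex_map_hash_to_stack_map_indices_). Below is a rough illustration of the dedup-on-insert idea, assuming padding-free POD entries; DedupTable is a made-up stand-in, not ART's BitTableBuilder, which additionally bit-packs the columns when encoding.

#include <cstddef>
#include <cstdint>
#include <string>
#include <unordered_map>
#include <vector>

// Minimal dedup-on-insert table. Assumes Entry is a padding-free POD so that a
// byte-wise key is a valid equality key.
template <typename Entry>
class DedupTable {
 public:
  // Returns the index of an equal existing entry, or appends a new one.
  uint32_t Dedup(const Entry* entry) {
    std::string key(reinterpret_cast<const char*>(entry), sizeof(Entry));
    auto it = index_.find(key);
    if (it != index_.end()) {
      return it->second;
    }
    uint32_t index = static_cast<uint32_t>(entries_.size());
    entries_.push_back(*entry);
    index_.emplace(std::move(key), index);
    return index;
  }
  const Entry& operator[](size_t i) const { return entries_[i]; }
  size_t size() const { return entries_.size(); }

 private:
  std::vector<Entry> entries_;
  std::unordered_map<std::string, uint32_t> index_;
};

int main() {
  struct RegisterMaskEntry { uint32_t value; uint32_t shift; };
  DedupTable<RegisterMaskEntry> masks;
  RegisterMaskEntry entry = {0x3u, 0u};
  uint32_t first = masks.Dedup(&entry);
  uint32_t second = masks.Dedup(&entry);  // Same index; the duplicate is not stored twice.
  return (first == second && masks.size() == 1) ? 0 : 1;
}
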
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 1127718..262c240 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -45,6 +45,8 @@
 
 using Kind = DexRegisterLocation::Kind;
 
+constexpr static uint32_t kPcAlign = GetInstructionSetInstructionAlignment(kRuntimeISA);
+
 TEST(StackMapTest, Test1) {
   MallocArenaPool pool;
   ArenaStack arena_stack(&pool);
@@ -53,7 +55,7 @@
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   size_t number_of_dex_registers = 2;
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Short location.
   stream.EndStackMapEntry();
@@ -68,18 +70,12 @@
 
   uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
   ASSERT_EQ(2u, number_of_catalog_entries);
-  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
-  // The Dex register location catalog contains:
-  // - one 1-byte short Dex register location, and
-  // - one 5-byte large Dex register location.
-  size_t expected_location_catalog_size = 1u + 5u;
-  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
 
   StackMap stack_map = code_info.GetStackMapAt(0);
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
-  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
   ASSERT_EQ(0u, stack_map.GetDexPc());
-  ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
   ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
 
   ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask));
@@ -89,30 +85,17 @@
       code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
   ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
   ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
-  ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-  // The Dex register map contains:
-  // - one 1-byte live bit mask, and
-  // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
-  size_t expected_dex_register_map_size = 1u + 1u;
-  ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+  ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
 
   ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind(0));
   ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(1));
-  ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind(0));
-  ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(1));
   ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0));
   ASSERT_EQ(-2, dex_register_map.GetConstant(1));
 
-  size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-  size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-  ASSERT_EQ(0u, index0);
-  ASSERT_EQ(1u, index1);
-  DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-  DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+  DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
+  DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
   ASSERT_EQ(Kind::kInStack, location0.GetKind());
   ASSERT_EQ(Kind::kConstant, location1.GetKind());
-  ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
-  ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
   ASSERT_EQ(0, location0.GetValue());
   ASSERT_EQ(-2, location1.GetValue());
 
@@ -131,7 +114,7 @@
   sp_mask1.SetBit(4);
   size_t number_of_dex_registers = 2;
   size_t number_of_dex_registers_in_inline_info = 0;
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 2);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
   stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -143,7 +126,7 @@
   ArenaBitVector sp_mask2(&allocator, 0, true);
   sp_mask2.SetBit(3);
   sp_mask2.SetBit(8);
-  stream.BeginStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(1, 128 * kPcAlign, 0xFF, &sp_mask2, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInRegister, 18);     // Short location.
   stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);   // Short location.
   stream.EndStackMapEntry();
@@ -151,7 +134,7 @@
   ArenaBitVector sp_mask3(&allocator, 0, true);
   sp_mask3.SetBit(1);
   sp_mask3.SetBit(5);
-  stream.BeginStackMapEntry(2, 192, 0xAB, &sp_mask3, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(2, 192 * kPcAlign, 0xAB, &sp_mask3, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInRegister, 6);       // Short location.
   stream.AddDexRegisterEntry(Kind::kInRegisterHigh, 8);   // Short location.
   stream.EndStackMapEntry();
@@ -159,7 +142,7 @@
   ArenaBitVector sp_mask4(&allocator, 0, true);
   sp_mask4.SetBit(6);
   sp_mask4.SetBit(7);
-  stream.BeginStackMapEntry(3, 256, 0xCD, &sp_mask4, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(3, 256 * kPcAlign, 0xCD, &sp_mask4, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInFpuRegister, 3);      // Short location, same in stack map 2.
   stream.AddDexRegisterEntry(Kind::kInFpuRegisterHigh, 1);  // Short location.
   stream.EndStackMapEntry();
@@ -174,20 +157,14 @@
 
   uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
   ASSERT_EQ(7u, number_of_catalog_entries);
-  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
-  // The Dex register location catalog contains:
-  // - six 1-byte short Dex register locations, and
-  // - one 5-byte large Dex register location.
-  size_t expected_location_catalog_size = 6u * 1u + 5u;
-  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
 
   // First stack map.
   {
     StackMap stack_map = code_info.GetStackMapAt(0);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
-    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
     ASSERT_EQ(0u, stack_map.GetDexPc());
-    ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
     ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
 
     ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));
@@ -197,30 +174,17 @@
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
-    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-    // The Dex register map contains:
-    // - one 1-byte live bit mask, and
-    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
-    size_t expected_dex_register_map_size = 1u + 1u;
-    ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
 
     ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationKind(0));
     ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(1));
-    ASSERT_EQ(Kind::kInStack, dex_register_map.GetLocationInternalKind(0));
-    ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(1));
     ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0));
     ASSERT_EQ(-2, dex_register_map.GetConstant(1));
 
-    size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-    size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-    ASSERT_EQ(0u, index0);
-    ASSERT_EQ(1u, index1);
-    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
+    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
     ASSERT_EQ(Kind::kInStack, location0.GetKind());
     ASSERT_EQ(Kind::kConstant, location1.GetKind());
-    ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
-    ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
     ASSERT_EQ(0, location0.GetValue());
     ASSERT_EQ(-2, location1.GetValue());
 
@@ -237,9 +201,9 @@
   {
     StackMap stack_map = code_info.GetStackMapAt(1);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1u)));
-    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u)));
+    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(128u * kPcAlign)));
     ASSERT_EQ(1u, stack_map.GetDexPc());
-    ASSERT_EQ(128u, stack_map.GetNativePcOffset(kRuntimeISA));
+    ASSERT_EQ(128u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
     ASSERT_EQ(0xFFu, code_info.GetRegisterMaskOf(stack_map));
 
     ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask2));
@@ -249,30 +213,17 @@
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
-    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-    // The Dex register map contains:
-    // - one 1-byte live bit mask, and
-    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
-    size_t expected_dex_register_map_size = 1u + 1u;
-    ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
 
     ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind(0));
     ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind(1));
-    ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind(0));
-    ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind(1));
     ASSERT_EQ(18, dex_register_map.GetMachineRegister(0));
     ASSERT_EQ(3, dex_register_map.GetMachineRegister(1));
 
-    size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-    size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-    ASSERT_EQ(2u, index0);
-    ASSERT_EQ(3u, index1);
-    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(2);
+    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(3);
     ASSERT_EQ(Kind::kInRegister, location0.GetKind());
     ASSERT_EQ(Kind::kInFpuRegister, location1.GetKind());
-    ASSERT_EQ(Kind::kInRegister, location0.GetInternalKind());
-    ASSERT_EQ(Kind::kInFpuRegister, location1.GetInternalKind());
     ASSERT_EQ(18, location0.GetValue());
     ASSERT_EQ(3, location1.GetValue());
 
@@ -283,9 +234,9 @@
   {
     StackMap stack_map = code_info.GetStackMapAt(2);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(2u)));
-    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u)));
+    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(192u * kPcAlign)));
     ASSERT_EQ(2u, stack_map.GetDexPc());
-    ASSERT_EQ(192u, stack_map.GetNativePcOffset(kRuntimeISA));
+    ASSERT_EQ(192u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
     ASSERT_EQ(0xABu, code_info.GetRegisterMaskOf(stack_map));
 
     ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask3));
@@ -295,30 +246,17 @@
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
-    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-    // The Dex register map contains:
-    // - one 1-byte live bit mask, and
-    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
-    size_t expected_dex_register_map_size = 1u + 1u;
-    ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
 
     ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationKind(0));
     ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map.GetLocationKind(1));
-    ASSERT_EQ(Kind::kInRegister, dex_register_map.GetLocationInternalKind(0));
-    ASSERT_EQ(Kind::kInRegisterHigh, dex_register_map.GetLocationInternalKind(1));
     ASSERT_EQ(6, dex_register_map.GetMachineRegister(0));
     ASSERT_EQ(8, dex_register_map.GetMachineRegister(1));
 
-    size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-    size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-    ASSERT_EQ(4u, index0);
-    ASSERT_EQ(5u, index1);
-    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(4);
+    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(5);
     ASSERT_EQ(Kind::kInRegister, location0.GetKind());
     ASSERT_EQ(Kind::kInRegisterHigh, location1.GetKind());
-    ASSERT_EQ(Kind::kInRegister, location0.GetInternalKind());
-    ASSERT_EQ(Kind::kInRegisterHigh, location1.GetInternalKind());
     ASSERT_EQ(6, location0.GetValue());
     ASSERT_EQ(8, location1.GetValue());
 
@@ -329,9 +267,9 @@
   {
     StackMap stack_map = code_info.GetStackMapAt(3);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(3u)));
-    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u)));
+    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(256u * kPcAlign)));
     ASSERT_EQ(3u, stack_map.GetDexPc());
-    ASSERT_EQ(256u, stack_map.GetNativePcOffset(kRuntimeISA));
+    ASSERT_EQ(256u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
     ASSERT_EQ(0xCDu, code_info.GetRegisterMaskOf(stack_map));
 
     ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask4));
@@ -341,30 +279,17 @@
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
     ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
-    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-    // The Dex register map contains:
-    // - one 1-byte live bit mask, and
-    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
-    size_t expected_dex_register_map_size = 1u + 1u;
-    ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters());
 
     ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationKind(0));
     ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map.GetLocationKind(1));
-    ASSERT_EQ(Kind::kInFpuRegister, dex_register_map.GetLocationInternalKind(0));
-    ASSERT_EQ(Kind::kInFpuRegisterHigh, dex_register_map.GetLocationInternalKind(1));
     ASSERT_EQ(3, dex_register_map.GetMachineRegister(0));
     ASSERT_EQ(1, dex_register_map.GetMachineRegister(1));
 
-    size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-    size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-    ASSERT_EQ(3u, index0);  // Shared with second stack map.
-    ASSERT_EQ(6u, index1);
-    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(3);
+    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(6);
     ASSERT_EQ(Kind::kInFpuRegister, location0.GetKind());
     ASSERT_EQ(Kind::kInFpuRegisterHigh, location1.GetKind());
-    ASSERT_EQ(Kind::kInFpuRegister, location0.GetInternalKind());
-    ASSERT_EQ(Kind::kInFpuRegisterHigh, location1.GetInternalKind());
     ASSERT_EQ(3, location0.GetValue());
     ASSERT_EQ(1, location1.GetValue());
 
@@ -384,7 +309,7 @@
   sp_mask1.SetBit(4);
   const size_t number_of_dex_registers = 2;
   const size_t number_of_dex_registers_in_inline_info = 2;
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 1);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, number_of_dex_registers, 1);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);         // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
   stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
@@ -403,20 +328,14 @@
 
   uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
   ASSERT_EQ(2u, number_of_catalog_entries);
-  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
-  // The Dex register location catalog contains:
-  // - one 1-byte short Dex register locations, and
-  // - one 5-byte large Dex register location.
-  const size_t expected_location_catalog_size = 1u + 5u;
-  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
 
   // First stack map.
   {
     StackMap stack_map = code_info.GetStackMapAt(0);
     ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
-    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+    ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
     ASSERT_EQ(0u, stack_map.GetDexPc());
-    ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+    ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
     ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
 
     ASSERT_TRUE(CheckStackMask(code_info, stack_map, sp_mask1));
@@ -425,30 +344,17 @@
     DexRegisterMap map(code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers));
     ASSERT_TRUE(map.IsDexRegisterLive(0));
     ASSERT_TRUE(map.IsDexRegisterLive(1));
-    ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-    // The Dex register map contains:
-    // - one 1-byte live bit mask, and
-    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
-    size_t expected_map_size = 1u + 1u;
-    ASSERT_EQ(expected_map_size, map.Size());
+    ASSERT_EQ(2u, map.GetNumberOfLiveDexRegisters());
 
     ASSERT_EQ(Kind::kInStack, map.GetLocationKind(0));
     ASSERT_EQ(Kind::kConstant, map.GetLocationKind(1));
-    ASSERT_EQ(Kind::kInStack, map.GetLocationInternalKind(0));
-    ASSERT_EQ(Kind::kConstantLargeValue, map.GetLocationInternalKind(1));
     ASSERT_EQ(0, map.GetStackOffsetInBytes(0));
     ASSERT_EQ(-2, map.GetConstant(1));
 
-    const size_t index0 = map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-    const size_t index1 = map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-    ASSERT_EQ(0u, index0);
-    ASSERT_EQ(1u, index1);
-    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    DexRegisterLocation location0 = code_info.GetDexRegisterCatalogEntry(0);
+    DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(1);
     ASSERT_EQ(Kind::kInStack, location0.GetKind());
     ASSERT_EQ(Kind::kConstant, location1.GetKind());
-    ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
-    ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
     ASSERT_EQ(0, location0.GetValue());
     ASSERT_EQ(-2, location1.GetValue());
 
@@ -456,8 +362,8 @@
     // one.
     ASSERT_TRUE(stack_map.HasInlineInfo());
     InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
-    EXPECT_EQ(inline_info.GetDexRegisterMapOffsetAtDepth(0),
-              stack_map.GetDexRegisterMapOffset());
+    EXPECT_EQ(inline_info.GetDexRegisterMapIndexAtDepth(0),
+              stack_map.GetDexRegisterMapIndex());
   }
 }
 
@@ -469,7 +375,7 @@
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   uint32_t number_of_dex_registers = 2;
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kNone, 0);            // No location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);       // Large location.
   stream.EndStackMapEntry();
@@ -484,17 +390,12 @@
 
   uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
   ASSERT_EQ(1u, number_of_catalog_entries);
-  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
-  // The Dex register location catalog contains:
-  // - one 5-byte large Dex register location.
-  size_t expected_location_catalog_size = 5u;
-  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
 
   StackMap stack_map = code_info.GetStackMapAt(0);
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
-  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
   ASSERT_EQ(0u, stack_map.GetDexPc());
-  ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
   ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap());
@@ -502,100 +403,19 @@
       code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
   ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0));
   ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
-  ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
-  // The Dex register map contains:
-  // - one 1-byte live bit mask.
-  // No space is allocated for the sole location catalog entry index, as it is useless.
-  size_t expected_dex_register_map_size = 1u + 0u;
-  ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+  ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters());
 
   ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationKind(0));
   ASSERT_EQ(Kind::kConstant, dex_register_map.GetLocationKind(1));
-  ASSERT_EQ(Kind::kNone, dex_register_map.GetLocationInternalKind(0));
-  ASSERT_EQ(Kind::kConstantLargeValue, dex_register_map.GetLocationInternalKind(1));
   ASSERT_EQ(-2, dex_register_map.GetConstant(1));
 
-  size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(0, number_of_catalog_entries);
-  size_t index1 =  dex_register_map.GetLocationCatalogEntryIndex(1, number_of_catalog_entries);
-  ASSERT_EQ(DexRegisterLocationCatalog::kNoLocationEntryIndex, index0);
-  ASSERT_EQ(0u, index1);
-  DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
-  DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
-  ASSERT_EQ(Kind::kNone, location0.GetKind());
+  DexRegisterLocation location1 = code_info.GetDexRegisterCatalogEntry(0);
   ASSERT_EQ(Kind::kConstant, location1.GetKind());
-  ASSERT_EQ(Kind::kNone, location0.GetInternalKind());
-  ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
-  ASSERT_EQ(0, location0.GetValue());
   ASSERT_EQ(-2, location1.GetValue());
 
   ASSERT_FALSE(stack_map.HasInlineInfo());
 }
 
-// Generate a stack map whose dex register offset is
-// StackMap::kNoDexRegisterMapSmallEncoding, and ensure we do
-// not treat it as kNoDexRegisterMap.
-TEST(StackMapTest, DexRegisterMapOffsetOverflow) {
-  MallocArenaPool pool;
-  ArenaStack arena_stack(&pool);
-  ScopedArenaAllocator allocator(&arena_stack);
-  StackMapStream stream(&allocator, kRuntimeISA);
-
-  ArenaBitVector sp_mask(&allocator, 0, false);
-  uint32_t number_of_dex_registers = 1024;
-  // Create the first stack map (and its Dex register map).
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  uint32_t number_of_dex_live_registers_in_dex_register_map_0 = number_of_dex_registers - 8;
-  for (uint32_t i = 0; i < number_of_dex_live_registers_in_dex_register_map_0; ++i) {
-    // Use two different Dex register locations to populate this map,
-    // as using a single value (in the whole CodeInfo object) would
-    // make this Dex register mapping data empty (see
-    // art::DexRegisterMap::SingleEntrySizeInBits).
-    stream.AddDexRegisterEntry(Kind::kConstant, i % 2);  // Short location.
-  }
-  stream.EndStackMapEntry();
-  // Create the second stack map (and its Dex register map).
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  for (uint32_t i = 0; i < number_of_dex_registers; ++i) {
-    stream.AddDexRegisterEntry(Kind::kConstant, 0);  // Short location.
-  }
-  stream.EndStackMapEntry();
-
-  size_t size = stream.PrepareForFillIn();
-  void* memory = allocator.Alloc(size, kArenaAllocMisc);
-  MemoryRegion region(memory, size);
-  stream.FillInCodeInfo(region);
-
-  CodeInfo code_info(region);
-  // The location catalog contains two entries (DexRegisterLocation(kConstant, 0)
-  // and DexRegisterLocation(kConstant, 1)), therefore the location catalog index
-  // has a size of 1 bit.
-  uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
-  ASSERT_EQ(2u, number_of_catalog_entries);
-  ASSERT_EQ(1u, DexRegisterMap::SingleEntrySizeInBits(number_of_catalog_entries));
-
-  // The first Dex register map contains:
-  // - a live register bit mask for 1024 registers (that is, 128 bytes of
-  //   data); and
-  // - Dex register mapping information for 1016 1-bit Dex (live) register
-  //   locations (that is, 127 bytes of data).
-  // Hence it has a size of 255 bytes, and therefore...
-  ASSERT_EQ(128u, DexRegisterMap::GetLiveBitMaskSize(number_of_dex_registers));
-  StackMap stack_map0 = code_info.GetStackMapAt(0);
-  DexRegisterMap dex_register_map0 =
-      code_info.GetDexRegisterMapOf(stack_map0, number_of_dex_registers);
-  ASSERT_EQ(127u, dex_register_map0.GetLocationMappingDataSize(number_of_catalog_entries));
-  ASSERT_EQ(255u, dex_register_map0.Size());
-
-  StackMap stack_map1 = code_info.GetStackMapAt(1);
-  ASSERT_TRUE(stack_map1.HasDexRegisterMap());
-  // ...the offset of the second Dex register map (relative to the
-  // beginning of the Dex register maps region) is 255 (i.e.,
-  // kNoDexRegisterMapSmallEncoding).
-  ASSERT_NE(stack_map1.GetDexRegisterMapOffset(),
-            StackMap::kNoValue);
-  ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(), 0xFFu);
-}
-
 TEST(StackMapTest, TestShareDexRegisterMap) {
   MallocArenaPool pool;
   ArenaStack arena_stack(&pool);
@@ -605,17 +425,17 @@
   ArenaBitVector sp_mask(&allocator, 0, false);
   uint32_t number_of_dex_registers = 2;
   // First stack map.
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInRegister, 0);  // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
   stream.EndStackMapEntry();
   // Second stack map, which should share the same dex register map.
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInRegister, 0);  // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
   stream.EndStackMapEntry();
   // Third stack map (doesn't share the dex register map).
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
   stream.AddDexRegisterEntry(Kind::kInRegister, 2);  // Short location.
   stream.AddDexRegisterEntry(Kind::kConstant, -2);   // Large location.
   stream.EndStackMapEntry();
@@ -646,12 +466,12 @@
   ASSERT_EQ(-2, dex_registers2.GetConstant(1));
 
   // Verify dex register map indices.
-  ASSERT_EQ(sm0.GetDexRegisterMapOffset(),
-            sm1.GetDexRegisterMapOffset());
-  ASSERT_NE(sm0.GetDexRegisterMapOffset(),
-            sm2.GetDexRegisterMapOffset());
-  ASSERT_NE(sm1.GetDexRegisterMapOffset(),
-            sm2.GetDexRegisterMapOffset());
+  ASSERT_EQ(sm0.GetDexRegisterMapIndex(),
+            sm1.GetDexRegisterMapIndex());
+  ASSERT_NE(sm0.GetDexRegisterMapIndex(),
+            sm2.GetDexRegisterMapIndex());
+  ASSERT_NE(sm1.GetDexRegisterMapIndex(),
+            sm2.GetDexRegisterMapIndex());
 }
 
 TEST(StackMapTest, TestNoDexRegisterMap) {
@@ -662,11 +482,12 @@
 
   ArenaBitVector sp_mask(&allocator, 0, false);
   uint32_t number_of_dex_registers = 0;
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask, number_of_dex_registers, 0);
   stream.EndStackMapEntry();
 
   number_of_dex_registers = 1;
-  stream.BeginStackMapEntry(1, 68, 0x4, &sp_mask, number_of_dex_registers, 0);
+  stream.BeginStackMapEntry(1, 68 * kPcAlign, 0x4, &sp_mask, number_of_dex_registers, 0);
+  stream.AddDexRegisterEntry(Kind::kNone, 0);
   stream.EndStackMapEntry();
 
   size_t size = stream.PrepareForFillIn();
@@ -679,14 +500,12 @@
 
   uint32_t number_of_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
   ASSERT_EQ(0u, number_of_catalog_entries);
-  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
-  ASSERT_EQ(0u, location_catalog.Size());
 
   StackMap stack_map = code_info.GetStackMapAt(0);
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
-  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64 * kPcAlign)));
   ASSERT_EQ(0u, stack_map.GetDexPc());
-  ASSERT_EQ(64u, stack_map.GetNativePcOffset(kRuntimeISA));
+  ASSERT_EQ(64u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
   ASSERT_EQ(0x3u, code_info.GetRegisterMaskOf(stack_map));
 
   ASSERT_FALSE(stack_map.HasDexRegisterMap());
@@ -694,12 +513,12 @@
 
   stack_map = code_info.GetStackMapAt(1);
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(1)));
-  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(68 * kPcAlign)));
   ASSERT_EQ(1u, stack_map.GetDexPc());
-  ASSERT_EQ(68u, stack_map.GetNativePcOffset(kRuntimeISA));
+  ASSERT_EQ(68u * kPcAlign, stack_map.GetNativePcOffset(kRuntimeISA));
   ASSERT_EQ(0x4u, code_info.GetRegisterMaskOf(stack_map));
 
-  ASSERT_FALSE(stack_map.HasDexRegisterMap());
+  ASSERT_TRUE(stack_map.HasDexRegisterMap());
   ASSERT_FALSE(stack_map.HasInlineInfo());
 }
 
@@ -715,7 +534,7 @@
   sp_mask1.SetBit(4);
 
   // First stack map.
-  stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, 2, 2);
+  stream.BeginStackMapEntry(0, 64 * kPcAlign, 0x3, &sp_mask1, 2, 2);
   stream.AddDexRegisterEntry(Kind::kInStack, 0);
   stream.AddDexRegisterEntry(Kind::kConstant, 4);
 
@@ -731,7 +550,7 @@
   stream.EndStackMapEntry();
 
   // Second stack map.
-  stream.BeginStackMapEntry(2, 22, 0x3, &sp_mask1, 2, 3);
+  stream.BeginStackMapEntry(2, 22 * kPcAlign, 0x3, &sp_mask1, 2, 3);
   stream.AddDexRegisterEntry(Kind::kInStack, 56);
   stream.AddDexRegisterEntry(Kind::kConstant, 0);
 
@@ -749,13 +568,13 @@
   stream.EndStackMapEntry();
 
   // Third stack map.
-  stream.BeginStackMapEntry(4, 56, 0x3, &sp_mask1, 2, 0);
+  stream.BeginStackMapEntry(4, 56 * kPcAlign, 0x3, &sp_mask1, 2, 0);
   stream.AddDexRegisterEntry(Kind::kNone, 0);
   stream.AddDexRegisterEntry(Kind::kConstant, 4);
   stream.EndStackMapEntry();
 
   // Fourth stack map.
-  stream.BeginStackMapEntry(6, 78, 0x3, &sp_mask1, 2, 3);
+  stream.BeginStackMapEntry(6, 78 * kPcAlign, 0x3, &sp_mask1, 2, 3);
   stream.AddDexRegisterEntry(Kind::kInStack, 56);
   stream.AddDexRegisterEntry(Kind::kConstant, 0);
 
@@ -869,6 +688,7 @@
 }
 
 TEST(StackMapTest, PackedNativePcTest) {
+  // Test that minimum-alignment PCs pack and decode correctly.
   uint32_t packed_thumb2 =
       StackMap::PackNativePc(kThumb2InstructionAlignment, InstructionSet::kThumb2);
   uint32_t packed_arm64 =
@@ -904,9 +724,9 @@
   ArenaBitVector sp_mask(&allocator, 0, true);
   sp_mask.SetBit(1);
   sp_mask.SetBit(4);
-  stream.BeginStackMapEntry(0, 4, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
   stream.EndStackMapEntry();
-  stream.BeginStackMapEntry(0, 8, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
   stream.EndStackMapEntry();
 
   size_t size = stream.PrepareForFillIn();
@@ -917,8 +737,8 @@
   CodeInfo code_info(region);
   ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
 
-  StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4);
-  StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8);
+  StackMap stack_map1 = code_info.GetStackMapForNativePcOffset(4 * kPcAlign);
+  StackMap stack_map2 = code_info.GetStackMapForNativePcOffset(8 * kPcAlign);
   EXPECT_EQ(stack_map1.GetStackMaskIndex(),
             stack_map2.GetStackMaskIndex());
 }
@@ -931,13 +751,13 @@
 
   ArenaBitVector sp_mask(&allocator, 0, true);
   sp_mask.SetBit(1);
-  stream.BeginStackMapEntry(0, 4, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 4 * kPcAlign, 0x3, &sp_mask, 0, 0);
   stream.AddInvoke(kSuper, 1);
   stream.EndStackMapEntry();
-  stream.BeginStackMapEntry(0, 8, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 8 * kPcAlign, 0x3, &sp_mask, 0, 0);
   stream.AddInvoke(kStatic, 3);
   stream.EndStackMapEntry();
-  stream.BeginStackMapEntry(0, 16, 0x3, &sp_mask, 0, 0);
+  stream.BeginStackMapEntry(0, 16 * kPcAlign, 0x3, &sp_mask, 0, 0);
   stream.AddInvoke(kDirect, 65535);
   stream.EndStackMapEntry();
 
@@ -954,9 +774,9 @@
   MethodInfo method_info(method_info_region.begin());
   ASSERT_EQ(3u, code_info.GetNumberOfStackMaps());
 
-  InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4));
-  InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8));
-  InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16));
+  InvokeInfo invoke1(code_info.GetInvokeInfoForNativePcOffset(4 * kPcAlign));
+  InvokeInfo invoke2(code_info.GetInvokeInfoForNativePcOffset(8 * kPcAlign));
+  InvokeInfo invoke3(code_info.GetInvokeInfoForNativePcOffset(16 * kPcAlign));
   InvokeInfo invoke_invalid(code_info.GetInvokeInfoForNativePcOffset(12));
   EXPECT_FALSE(invoke_invalid.IsValid());  // No entry for that index.
   EXPECT_TRUE(invoke1.IsValid());
@@ -964,13 +784,13 @@
   EXPECT_TRUE(invoke3.IsValid());
   EXPECT_EQ(invoke1.GetInvokeType(), kSuper);
   EXPECT_EQ(invoke1.GetMethodIndex(method_info), 1u);
-  EXPECT_EQ(invoke1.GetNativePcOffset(kRuntimeISA), 4u);
+  EXPECT_EQ(invoke1.GetNativePcOffset(kRuntimeISA), 4u * kPcAlign);
   EXPECT_EQ(invoke2.GetInvokeType(), kStatic);
   EXPECT_EQ(invoke2.GetMethodIndex(method_info), 3u);
-  EXPECT_EQ(invoke2.GetNativePcOffset(kRuntimeISA), 8u);
+  EXPECT_EQ(invoke2.GetNativePcOffset(kRuntimeISA), 8u * kPcAlign);
   EXPECT_EQ(invoke3.GetInvokeType(), kDirect);
   EXPECT_EQ(invoke3.GetMethodIndex(method_info), 65535u);
-  EXPECT_EQ(invoke3.GetNativePcOffset(kRuntimeISA), 16u);
+  EXPECT_EQ(invoke3.GetNativePcOffset(kRuntimeISA), 16u * kPcAlign);
 }
 
 }  // namespace art
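Note on the kPcAlign scaling used throughout the test updates above: native PC offsets are now stored in packed form, so the tests multiply every offset by the instruction alignment before handing it to the stream and expect the same scaled value back from the decoder. Below is a minimal sketch of the round-trip those assertions rely on, using StackMap::PackNativePc/UnpackNativePc; the local kPcAlign constant and the CheckNativePcRoundTrip helper are illustrative assumptions, not part of this change.

    // Sketch: offsets that are multiples of the ISA instruction alignment
    // survive packing and unpacking unchanged.
    #include "arch/instruction_set.h"
    #include "base/logging.h"
    #include "stack_map.h"

    namespace art {

    static void CheckNativePcRoundTrip() {
      constexpr uint32_t kPcAlign = 2u;  // Thumb2 instruction alignment (assumed here).
      for (uint32_t offset : {4u * kPcAlign, 8u * kPcAlign, 64u * kPcAlign}) {
        const uint32_t packed = StackMap::PackNativePc(offset, InstructionSet::kThumb2);
        CHECK_EQ(StackMap::UnpackNativePc(packed, InstructionSet::kThumb2), offset);
      }
    }

    }  // namespace art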
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 8cfd044..bf3d3b0 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -194,7 +194,7 @@
       if (count <= size() - index &&
           std::equal(values,
                      values + count,
-                     &rows_[index],
+                     rows_.begin() + index,
                      [](const T& lhs, const T& rhs) {
                        return memcmp(&lhs, &rhs, sizeof(T)) == 0;
                      })) {
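The bit_table.h change above only swaps a raw pointer into the backing storage for an iterator; the deduplication strategy itself is unchanged: a run of candidate rows is compared bitwise against rows already stored, and the existing index is reused on a match. A self-contained sketch of that comparison follows (plain C++ with illustrative names, not the actual BitTableBuilder Dedup API).

    #include <algorithm>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    // Returns the index of an existing run of `count` rows bitwise-equal to
    // `values`, or rows.size() if there is no match. T must be trivially
    // copyable for memcmp-based equality to be meaningful.
    template <typename T>
    size_t FindDuplicateRows(const std::vector<T>& rows, const T* values, size_t count) {
      for (size_t index = 0; index + count <= rows.size(); ++index) {
        if (std::equal(values, values + count, rows.begin() + index,
                       [](const T& lhs, const T& rhs) {
                         return std::memcmp(&lhs, &rhs, sizeof(T)) == 0;
                       })) {
          return index;
        }
      }
      return rows.size();
    }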
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 6b626c2..7ac9e98 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -738,6 +738,7 @@
       kByteKindCode,
       kByteKindQuickMethodHeader,
       kByteKindCodeInfoLocationCatalog,
+      kByteKindCodeInfoDexRegisterMask,
       kByteKindCodeInfoDexRegisterMap,
       kByteKindCodeInfo,
       kByteKindCodeInfoInvokeInfo,
@@ -751,7 +752,7 @@
       kByteKindStackMapStackMaskIndex,
       kByteKindInlineInfoMethodIndexIdx,
       kByteKindInlineInfoDexPc,
-      kByteKindInlineInfoExtraData,
+      kByteKindInlineInfoArtMethod,
       kByteKindInlineInfoDexRegisterMap,
       kByteKindInlineInfoIsLast,
       kByteKindCount,
@@ -788,6 +789,7 @@
         Dump(os, "QuickMethodHeader               ", bits[kByteKindQuickMethodHeader], sum);
         Dump(os, "CodeInfo                        ", bits[kByteKindCodeInfo], sum);
         Dump(os, "CodeInfoLocationCatalog         ", bits[kByteKindCodeInfoLocationCatalog], sum);
+        Dump(os, "CodeInfoDexRegisterMask         ", bits[kByteKindCodeInfoDexRegisterMask], sum);
         Dump(os, "CodeInfoDexRegisterMap          ", bits[kByteKindCodeInfoDexRegisterMap], sum);
         Dump(os, "CodeInfoStackMasks              ", bits[kByteKindCodeInfoStackMasks], sum);
         Dump(os, "CodeInfoRegisterMasks           ", bits[kByteKindCodeInfoRegisterMasks], sum);
@@ -848,8 +850,8 @@
                inline_info_bits,
                "inline info");
           Dump(os,
-               "InlineInfoExtraData           ",
-               bits[kByteKindInlineInfoExtraData],
+               "InlineInfoArtMethod           ",
+               bits[kByteKindInlineInfoArtMethod],
                inline_info_bits,
                "inline info");
           Dump(os,
@@ -1706,7 +1708,7 @@
               stack_maps.NumColumnBits(StackMap::kDexPc) * num_stack_maps);
           stats_.AddBits(
               Stats::kByteKindStackMapDexRegisterMap,
-              stack_maps.NumColumnBits(StackMap::kDexRegisterMapOffset) * num_stack_maps);
+              stack_maps.NumColumnBits(StackMap::kDexRegisterMapIndex) * num_stack_maps);
           stats_.AddBits(
               Stats::kByteKindStackMapInlineInfoIndex,
               stack_maps.NumColumnBits(StackMap::kInlineInfoIndex) * num_stack_maps);
@@ -1733,16 +1735,12 @@
               code_info.invoke_infos_.DataBitSize());
 
           // Location catalog
-          const size_t location_catalog_bytes =
-              helper.GetCodeInfo().GetDexRegisterLocationCatalogSize();
           stats_.AddBits(Stats::kByteKindCodeInfoLocationCatalog,
-                         kBitsPerByte * location_catalog_bytes);
-          // Dex register bytes.
-          const size_t dex_register_bytes =
-              helper.GetCodeInfo().GetDexRegisterMapsSize(code_item_accessor.RegistersSize());
-          stats_.AddBits(
-              Stats::kByteKindCodeInfoDexRegisterMap,
-              kBitsPerByte * dex_register_bytes);
+                         code_info.dex_register_catalog_.DataBitSize());
+          stats_.AddBits(Stats::kByteKindCodeInfoDexRegisterMask,
+                         code_info.dex_register_masks_.DataBitSize());
+          stats_.AddBits(Stats::kByteKindCodeInfoDexRegisterMap,
+                         code_info.dex_register_maps_.DataBitSize());
 
           // Inline infos.
           const BitTable<InlineInfo::kCount>& inline_infos = code_info.inline_infos_;
@@ -1755,11 +1753,12 @@
                 Stats::kByteKindInlineInfoDexPc,
                 inline_infos.NumColumnBits(InlineInfo::kDexPc) * num_inline_infos);
             stats_.AddBits(
-                Stats::kByteKindInlineInfoExtraData,
-                inline_infos.NumColumnBits(InlineInfo::kExtraData) * num_inline_infos);
+                Stats::kByteKindInlineInfoArtMethod,
+                inline_infos.NumColumnBits(InlineInfo::kArtMethodHi) * num_inline_infos +
+                inline_infos.NumColumnBits(InlineInfo::kArtMethodLo) * num_inline_infos);
             stats_.AddBits(
                 Stats::kByteKindInlineInfoDexRegisterMap,
-                inline_infos.NumColumnBits(InlineInfo::kDexRegisterMapOffset) * num_inline_infos);
+                inline_infos.NumColumnBits(InlineInfo::kDexRegisterMapIndex) * num_inline_infos);
             stats_.AddBits(Stats::kByteKindInlineInfoIsLast, num_inline_infos);
           }
         }
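The oatdump changes above stop sizing dex register data through the old CodeInfo byte-size helpers and instead attribute storage per BitTable: a column's contribution is its per-row bit width times the number of rows, and whole tables are charged via DataBitSize(). A tiny sketch of that per-column accounting (the helper name and the way rows are counted are assumptions for illustration):

    #include <cstddef>

    // Bits attributed to one BitTable column: fixed per-row width times row count.
    template <typename Table>
    size_t ColumnBits(const Table& table, size_t column, size_t num_rows) {
      return table.NumColumnBits(column) * num_rows;
    }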
diff --git a/runtime/dex_register_location.h b/runtime/dex_register_location.h
new file mode 100644
index 0000000..c6d4ad2
--- /dev/null
+++ b/runtime/dex_register_location.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_DEX_REGISTER_LOCATION_H_
+#define ART_RUNTIME_DEX_REGISTER_LOCATION_H_
+
+#include <array>
+#include <cstdint>
+
+#include "base/dchecked_vector.h"
+#include "base/memory_region.h"
+
+namespace art {
+
+// Dex register location container used by DexRegisterMap and StackMapStream.
+class DexRegisterLocation {
+ public:
+  enum class Kind : int32_t {
+    kNone = -1,          // vreg has not been set.
+    kInStack,            // vreg is on the stack, value holds the stack offset.
+    kConstant,           // vreg is a constant value.
+    kInRegister,         // vreg is in low 32 bits of a core physical register.
+    kInRegisterHigh,     // vreg is in high 32 bits of a core physical register.
+    kInFpuRegister,      // vreg is in low 32 bits of an FPU register.
+    kInFpuRegisterHigh,  // vreg is in high 32 bits of an FPU register.
+  };
+
+  DexRegisterLocation(Kind kind, int32_t value) : kind_(kind), value_(value) {}
+
+  static DexRegisterLocation None() {
+    return DexRegisterLocation(Kind::kNone, 0);
+  }
+
+  bool IsLive() const { return kind_ != Kind::kNone; }
+
+  Kind GetKind() const { return kind_; }
+
+  // TODO: Remove.
+  Kind GetInternalKind() const { return kind_; }
+
+  int32_t GetValue() const { return value_; }
+
+  bool operator==(DexRegisterLocation other) const {
+    return kind_ == other.kind_ && value_ == other.value_;
+  }
+
+  bool operator!=(DexRegisterLocation other) const {
+    return !(*this == other);
+  }
+
+ private:
+  DexRegisterLocation() {}
+
+  Kind kind_;
+  int32_t value_;
+
+  friend class DexRegisterMap;  // Allow creation of uninitialized array of locations.
+};
+
+static inline std::ostream& operator<<(std::ostream& stream, DexRegisterLocation::Kind kind) {
+  return stream << "Kind<" << static_cast<int32_t>(kind) << ">";
+}
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_DEX_REGISTER_LOCATION_H_
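The new header above is a small value type used by DexRegisterMap and StackMapStream; a quick usage sketch of the API it defines (the Demo function below is illustrative only):

    #include <cassert>

    #include "dex_register_location.h"

    void Demo() {
      art::DexRegisterLocation in_stack(art::DexRegisterLocation::Kind::kInStack, 16);
      art::DexRegisterLocation constant(art::DexRegisterLocation::Kind::kConstant, -2);
      assert(in_stack.IsLive());     // Any kind other than kNone is live.
      assert(in_stack != constant);  // Kind and value both participate in equality.
      assert(!art::DexRegisterLocation::None().IsLive());
      assert(art::DexRegisterLocation::None().GetValue() == 0);
    }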
diff --git a/runtime/interpreter/mterp/arm/instruction_end.S b/runtime/interpreter/mterp/arm/instruction_end.S
new file mode 100644
index 0000000..32c725c
--- /dev/null
+++ b/runtime/interpreter/mterp/arm/instruction_end.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmInstructionEnd
+artMterpAsmInstructionEnd:
diff --git a/runtime/interpreter/mterp/arm/instruction_end_alt.S b/runtime/interpreter/mterp/arm/instruction_end_alt.S
new file mode 100644
index 0000000..f90916f
--- /dev/null
+++ b/runtime/interpreter/mterp/arm/instruction_end_alt.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmAltInstructionEnd
+artMterpAsmAltInstructionEnd:
diff --git a/runtime/interpreter/mterp/arm/instruction_end_sister.S b/runtime/interpreter/mterp/arm/instruction_end_sister.S
new file mode 100644
index 0000000..c5f4886
--- /dev/null
+++ b/runtime/interpreter/mterp/arm/instruction_end_sister.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmSisterEnd
+artMterpAsmSisterEnd:
diff --git a/runtime/interpreter/mterp/arm/instruction_start.S b/runtime/interpreter/mterp/arm/instruction_start.S
new file mode 100644
index 0000000..8874c20
--- /dev/null
+++ b/runtime/interpreter/mterp/arm/instruction_start.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmInstructionStart
+artMterpAsmInstructionStart = .L_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/arm/instruction_start_alt.S b/runtime/interpreter/mterp/arm/instruction_start_alt.S
new file mode 100644
index 0000000..0c9ffdb
--- /dev/null
+++ b/runtime/interpreter/mterp/arm/instruction_start_alt.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/arm/instruction_start_sister.S b/runtime/interpreter/mterp/arm/instruction_start_sister.S
new file mode 100644
index 0000000..2ec51f7
--- /dev/null
+++ b/runtime/interpreter/mterp/arm/instruction_start_sister.S
@@ -0,0 +1,5 @@
+
+    .global artMterpAsmSisterStart
+    .text
+    .balign 4
+artMterpAsmSisterStart:
diff --git a/runtime/interpreter/mterp/arm64/instruction_end.S b/runtime/interpreter/mterp/arm64/instruction_end.S
new file mode 100644
index 0000000..32c725c
--- /dev/null
+++ b/runtime/interpreter/mterp/arm64/instruction_end.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmInstructionEnd
+artMterpAsmInstructionEnd:
diff --git a/runtime/interpreter/mterp/arm64/instruction_end_alt.S b/runtime/interpreter/mterp/arm64/instruction_end_alt.S
new file mode 100644
index 0000000..f90916f
--- /dev/null
+++ b/runtime/interpreter/mterp/arm64/instruction_end_alt.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmAltInstructionEnd
+artMterpAsmAltInstructionEnd:
diff --git a/runtime/interpreter/mterp/arm64/instruction_end_sister.S b/runtime/interpreter/mterp/arm64/instruction_end_sister.S
new file mode 100644
index 0000000..c5f4886
--- /dev/null
+++ b/runtime/interpreter/mterp/arm64/instruction_end_sister.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmSisterEnd
+artMterpAsmSisterEnd:
diff --git a/runtime/interpreter/mterp/arm64/instruction_start.S b/runtime/interpreter/mterp/arm64/instruction_start.S
new file mode 100644
index 0000000..8874c20
--- /dev/null
+++ b/runtime/interpreter/mterp/arm64/instruction_start.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmInstructionStart
+artMterpAsmInstructionStart = .L_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/arm64/instruction_start_alt.S b/runtime/interpreter/mterp/arm64/instruction_start_alt.S
new file mode 100644
index 0000000..0c9ffdb
--- /dev/null
+++ b/runtime/interpreter/mterp/arm64/instruction_start_alt.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/arm64/instruction_start_sister.S b/runtime/interpreter/mterp/arm64/instruction_start_sister.S
new file mode 100644
index 0000000..2ec51f7
--- /dev/null
+++ b/runtime/interpreter/mterp/arm64/instruction_start_sister.S
@@ -0,0 +1,5 @@
+
+    .global artMterpAsmSisterStart
+    .text
+    .balign 4
+artMterpAsmSisterStart:
diff --git a/runtime/interpreter/mterp/gen_mterp.py b/runtime/interpreter/mterp/gen_mterp.py
index 64114d7..75c5174 100755
--- a/runtime/interpreter/mterp/gen_mterp.py
+++ b/runtime/interpreter/mterp/gen_mterp.py
@@ -279,13 +279,8 @@
     sister_list = []
     assert len(opcodes) == kNumPackedOpcodes
     need_dummy_start = False
-    start_label = global_name_format % "artMterpAsmInstructionStart"
-    end_label = global_name_format % "artMterpAsmInstructionEnd"
 
-    # point MterpAsmInstructionStart at the first handler or stub
-    asm_fp.write("\n    .global %s\n" % start_label)
-    asm_fp.write("%s = " % start_label + label_prefix + "_op_nop\n")
-    asm_fp.write("    .text\n\n")
+    loadAndEmitGenericAsm("instruction_start")
 
     for i in xrange(kNumPackedOpcodes):
         op = opcodes[i]
@@ -309,20 +304,14 @@
         asm_fp.write(label_prefix + "_op_nop:   /* dummy */\n");
 
     emitAlign()
-    asm_fp.write("    .global %s\n" % end_label)
-    asm_fp.write("%s:\n" % end_label)
+
+    loadAndEmitGenericAsm("instruction_end")
 
     if style == "computed-goto":
-        start_sister_label = global_name_format % "artMterpAsmSisterStart"
-        end_sister_label = global_name_format % "artMterpAsmSisterEnd"
         emitSectionComment("Sister implementations", asm_fp)
-        asm_fp.write("    .global %s\n" % start_sister_label)
-        asm_fp.write("    .text\n")
-        asm_fp.write("    .balign 4\n")
-        asm_fp.write("%s:\n" % start_sister_label)
+        loadAndEmitGenericAsm("instruction_start_sister")
         asm_fp.writelines(sister_list)
-        asm_fp.write("    .global %s\n" % end_sister_label)
-        asm_fp.write("%s:\n\n" % end_sister_label)
+        loadAndEmitGenericAsm("instruction_end_sister")
 
 #
 # Load an alternate entry stub
@@ -345,10 +334,7 @@
     start_label = global_name_format % "artMterpAsmAltInstructionStart"
     end_label = global_name_format % "artMterpAsmAltInstructionEnd"
 
-    # point MterpAsmInstructionStart at the first handler or stub
-    asm_fp.write("\n    .global %s\n" % start_label)
-    asm_fp.write("    .text\n\n")
-    asm_fp.write("%s = " % start_label + label_prefix + "_ALT_op_nop\n")
+    loadAndEmitGenericAsm("instruction_start_alt")
 
     for i in xrange(kNumPackedOpcodes):
         op = opcodes[i]
@@ -359,8 +345,8 @@
         loadAndEmitAltStub(source, i)
 
     emitAlign()
-    asm_fp.write("    .global %s\n" % end_label)
-    asm_fp.write("%s:\n" % end_label)
+
+    loadAndEmitGenericAsm("instruction_end_alt")
 
 #
 # Load an assembly fragment and emit it.
@@ -377,6 +363,14 @@
     appendSourceFile(source, dict, asm_fp, sister_list)
 
 #
+# Load a non-handler assembly fragment and emit it.
+#
+def loadAndEmitGenericAsm(name):
+    source = "%s/%s.S" % (default_op_dir, name)
+    dict = getGlobalSubDict()
+    appendSourceFile(source, dict, asm_fp, None)
+
+#
 # Emit fallback fragment
 #
 def emitFallback(opindex):
diff --git a/runtime/interpreter/mterp/mips/instruction_end.S b/runtime/interpreter/mterp/mips/instruction_end.S
new file mode 100644
index 0000000..32c725c
--- /dev/null
+++ b/runtime/interpreter/mterp/mips/instruction_end.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmInstructionEnd
+artMterpAsmInstructionEnd:
diff --git a/runtime/interpreter/mterp/mips/instruction_end_alt.S b/runtime/interpreter/mterp/mips/instruction_end_alt.S
new file mode 100644
index 0000000..f90916f
--- /dev/null
+++ b/runtime/interpreter/mterp/mips/instruction_end_alt.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmAltInstructionEnd
+artMterpAsmAltInstructionEnd:
diff --git a/runtime/interpreter/mterp/mips/instruction_end_sister.S b/runtime/interpreter/mterp/mips/instruction_end_sister.S
new file mode 100644
index 0000000..c5f4886
--- /dev/null
+++ b/runtime/interpreter/mterp/mips/instruction_end_sister.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmSisterEnd
+artMterpAsmSisterEnd:
diff --git a/runtime/interpreter/mterp/mips/instruction_start.S b/runtime/interpreter/mterp/mips/instruction_start.S
new file mode 100644
index 0000000..8874c20
--- /dev/null
+++ b/runtime/interpreter/mterp/mips/instruction_start.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmInstructionStart
+artMterpAsmInstructionStart = .L_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/mips/instruction_start_alt.S b/runtime/interpreter/mterp/mips/instruction_start_alt.S
new file mode 100644
index 0000000..0c9ffdb
--- /dev/null
+++ b/runtime/interpreter/mterp/mips/instruction_start_alt.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/mips/instruction_start_sister.S b/runtime/interpreter/mterp/mips/instruction_start_sister.S
new file mode 100644
index 0000000..2ec51f7
--- /dev/null
+++ b/runtime/interpreter/mterp/mips/instruction_start_sister.S
@@ -0,0 +1,5 @@
+
+    .global artMterpAsmSisterStart
+    .text
+    .balign 4
+artMterpAsmSisterStart:
diff --git a/runtime/interpreter/mterp/mips64/instruction_end.S b/runtime/interpreter/mterp/mips64/instruction_end.S
new file mode 100644
index 0000000..32c725c
--- /dev/null
+++ b/runtime/interpreter/mterp/mips64/instruction_end.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmInstructionEnd
+artMterpAsmInstructionEnd:
diff --git a/runtime/interpreter/mterp/mips64/instruction_end_alt.S b/runtime/interpreter/mterp/mips64/instruction_end_alt.S
new file mode 100644
index 0000000..f90916f
--- /dev/null
+++ b/runtime/interpreter/mterp/mips64/instruction_end_alt.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmAltInstructionEnd
+artMterpAsmAltInstructionEnd:
diff --git a/runtime/interpreter/mterp/mips64/instruction_end_sister.S b/runtime/interpreter/mterp/mips64/instruction_end_sister.S
new file mode 100644
index 0000000..c5f4886
--- /dev/null
+++ b/runtime/interpreter/mterp/mips64/instruction_end_sister.S
@@ -0,0 +1,3 @@
+
+    .global artMterpAsmSisterEnd
+artMterpAsmSisterEnd:
diff --git a/runtime/interpreter/mterp/mips64/instruction_start.S b/runtime/interpreter/mterp/mips64/instruction_start.S
new file mode 100644
index 0000000..8874c20
--- /dev/null
+++ b/runtime/interpreter/mterp/mips64/instruction_start.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmInstructionStart
+artMterpAsmInstructionStart = .L_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/mips64/instruction_start_alt.S b/runtime/interpreter/mterp/mips64/instruction_start_alt.S
new file mode 100644
index 0000000..0c9ffdb
--- /dev/null
+++ b/runtime/interpreter/mterp/mips64/instruction_start_alt.S
@@ -0,0 +1,4 @@
+
+    .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/mips64/instruction_start_sister.S b/runtime/interpreter/mterp/mips64/instruction_start_sister.S
new file mode 100644
index 0000000..2ec51f7
--- /dev/null
+++ b/runtime/interpreter/mterp/mips64/instruction_start_sister.S
@@ -0,0 +1,5 @@
+
+    .global artMterpAsmSisterStart
+    .text
+    .balign 4
+artMterpAsmSisterStart:
diff --git a/runtime/interpreter/mterp/out/mterp_arm.S b/runtime/interpreter/mterp/out/mterp_arm.S
index 7ea7982..b2702a9 100644
--- a/runtime/interpreter/mterp/out/mterp_arm.S
+++ b/runtime/interpreter/mterp/out/mterp_arm.S
@@ -396,6 +396,7 @@
     GOTO_OPCODE ip                      @ jump to next instruction
     /* NOTE: no fallthrough */
 
+/* File: arm/instruction_start.S */
 
     .global artMterpAsmInstructionStart
 artMterpAsmInstructionStart = .L_op_nop
@@ -7509,19 +7510,25 @@
 
 
     .balign 128
+/* File: arm/instruction_end.S */
+
     .global artMterpAsmInstructionEnd
 artMterpAsmInstructionEnd:
 
+
 /*
  * ===========================================================================
  *  Sister implementations
  * ===========================================================================
  */
+/* File: arm/instruction_start_sister.S */
+
     .global artMterpAsmSisterStart
     .text
     .balign 4
 artMterpAsmSisterStart:
 
+
 /* continuation for op_float_to_long */
 /*
  * Convert the float in r0 to a long in r0/r1.
@@ -7583,14 +7590,17 @@
     mov     r0, #0
     mov     r1, #0
     bx      lr                          @ return 0 for NaN
+/* File: arm/instruction_end_sister.S */
+
     .global artMterpAsmSisterEnd
 artMterpAsmSisterEnd:
 
+/* File: arm/instruction_start_alt.S */
 
     .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
     .text
 
-artMterpAsmAltInstructionStart = .L_ALT_op_nop
 /* ------------------------------ */
     .balign 128
 .L_ALT_op_nop: /* 0x00 */
@@ -11944,8 +11954,11 @@
     b      MterpCheckBefore     @ (self, shadow_frame, dex_pc_ptr)  @ Tail call.
 
     .balign 128
+/* File: arm/instruction_end_alt.S */
+
     .global artMterpAsmAltInstructionEnd
 artMterpAsmAltInstructionEnd:
+
 /* File: arm/footer.S */
 /*
  * ===========================================================================
diff --git a/runtime/interpreter/mterp/out/mterp_arm64.S b/runtime/interpreter/mterp/out/mterp_arm64.S
index 70f71ff..2a0c4df 100644
--- a/runtime/interpreter/mterp/out/mterp_arm64.S
+++ b/runtime/interpreter/mterp/out/mterp_arm64.S
@@ -427,6 +427,7 @@
     GOTO_OPCODE ip                      // jump to next instruction
     /* NOTE: no fallthrough */
 
+/* File: arm64/instruction_start.S */
 
     .global artMterpAsmInstructionStart
 artMterpAsmInstructionStart = .L_op_nop
@@ -7075,18 +7076,26 @@
 
 
     .balign 128
+/* File: arm64/instruction_end.S */
+
     .global artMterpAsmInstructionEnd
 artMterpAsmInstructionEnd:
 
+
 /*
  * ===========================================================================
  *  Sister implementations
  * ===========================================================================
  */
+/* File: arm64/instruction_start_sister.S */
+
     .global artMterpAsmSisterStart
     .text
     .balign 4
 artMterpAsmSisterStart:
+
+/* File: arm64/instruction_end_sister.S */
+
     .global artMterpAsmSisterEnd
 artMterpAsmSisterEnd:
 
@@ -7398,11 +7407,12 @@
     ret
 
 
+/* File: arm64/instruction_start_alt.S */
 
     .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
     .text
 
-artMterpAsmAltInstructionStart = .L_ALT_op_nop
 /* ------------------------------ */
     .balign 128
 .L_ALT_op_nop: /* 0x00 */
@@ -11756,8 +11766,11 @@
     b      MterpCheckBefore     // (self, shadow_frame, dex_pc_ptr) Note: tail call.
 
     .balign 128
+/* File: arm64/instruction_end_alt.S */
+
     .global artMterpAsmAltInstructionEnd
 artMterpAsmAltInstructionEnd:
+
 /* File: arm64/close_cfi.S */
 // Close out the cfi info.  We're treating mterp as a single function.
 
diff --git a/runtime/interpreter/mterp/out/mterp_mips.S b/runtime/interpreter/mterp/out/mterp_mips.S
index 69568ea..3b86279 100644
--- a/runtime/interpreter/mterp/out/mterp_mips.S
+++ b/runtime/interpreter/mterp/out/mterp_mips.S
@@ -810,6 +810,7 @@
     GOTO_OPCODE(t0)                        # jump to next instruction
     /* NOTE: no fallthrough */
 
+/* File: mips/instruction_start.S */
 
     .global artMterpAsmInstructionStart
 artMterpAsmInstructionStart = .L_op_nop
@@ -7873,19 +7874,25 @@
 
 
     .balign 128
+/* File: mips/instruction_end.S */
+
     .global artMterpAsmInstructionEnd
 artMterpAsmInstructionEnd:
 
+
 /*
  * ===========================================================================
  *  Sister implementations
  * ===========================================================================
  */
+/* File: mips/instruction_start_sister.S */
+
     .global artMterpAsmSisterStart
     .text
     .balign 4
 artMterpAsmSisterStart:
 
+
 /* continuation for op_float_to_long */
 
 #ifndef MIPS32REVGE6
@@ -7941,14 +7948,17 @@
 
 .Lop_ushr_long_2addr_finish:
     SET_VREG64_GOTO(v1, zero, t3, t0)      #  vA/vA+1 <- rlo/rhi
+/* File: mips/instruction_end_sister.S */
+
     .global artMterpAsmSisterEnd
 artMterpAsmSisterEnd:
 
+/* File: mips/instruction_start_alt.S */
 
     .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
     .text
 
-artMterpAsmAltInstructionStart = .L_ALT_op_nop
 /* ------------------------------ */
     .balign 128
 .L_ALT_op_nop: /* 0x00 */
@@ -12558,8 +12568,11 @@
     jalr   zero, t9                     # Tail call to Mterp(self, shadow_frame, dex_pc_ptr)
 
     .balign 128
+/* File: mips/instruction_end_alt.S */
+
     .global artMterpAsmAltInstructionEnd
 artMterpAsmAltInstructionEnd:
+
 /* File: mips/footer.S */
 /*
  * ===========================================================================
diff --git a/runtime/interpreter/mterp/out/mterp_mips64.S b/runtime/interpreter/mterp/out/mterp_mips64.S
index 83a6613..58f98df 100644
--- a/runtime/interpreter/mterp/out/mterp_mips64.S
+++ b/runtime/interpreter/mterp/out/mterp_mips64.S
@@ -430,6 +430,7 @@
 
     /* NOTE: no fallthrough */
 
+/* File: mips64/instruction_start.S */
 
     .global artMterpAsmInstructionStart
 artMterpAsmInstructionStart = .L_op_nop
@@ -7299,26 +7300,35 @@
 
 
     .balign 128
+/* File: mips64/instruction_end.S */
+
     .global artMterpAsmInstructionEnd
 artMterpAsmInstructionEnd:
 
+
 /*
  * ===========================================================================
  *  Sister implementations
  * ===========================================================================
  */
+/* File: mips64/instruction_start_sister.S */
+
     .global artMterpAsmSisterStart
     .text
     .balign 4
 artMterpAsmSisterStart:
+
+/* File: mips64/instruction_end_sister.S */
+
     .global artMterpAsmSisterEnd
 artMterpAsmSisterEnd:
 
+/* File: mips64/instruction_start_alt.S */
 
     .global artMterpAsmAltInstructionStart
+artMterpAsmAltInstructionStart = .L_ALT_op_nop
     .text
 
-artMterpAsmAltInstructionStart = .L_ALT_op_nop
 /* ------------------------------ */
     .balign 128
 .L_ALT_op_nop: /* 0x00 */
@@ -12184,8 +12194,11 @@
     jalr    zero, t9                            # (self, shadow_frame, dex_pc_ptr) Note: tail call.
 
     .balign 128
+/* File: mips64/instruction_end_alt.S */
+
     .global artMterpAsmAltInstructionEnd
 artMterpAsmAltInstructionEnd:
+
 /* File: mips64/footer.S */
 /*
  * We've detected a condition that will result in an exception, but the exception
diff --git a/runtime/interpreter/mterp/out/mterp_x86.S b/runtime/interpreter/mterp/out/mterp_x86.S
index 1eacfe8..6be70cc 100644
--- a/runtime/interpreter/mterp/out/mterp_x86.S
+++ b/runtime/interpreter/mterp/out/mterp_x86.S
@@ -405,6 +405,7 @@
     GOTO_NEXT
     /* NOTE: no fallthrough */
 
+/* File: x86/instruction_start.S */
 
     .global SYMBOL(artMterpAsmInstructionStart)
 SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
@@ -6470,26 +6471,35 @@
 
 
     .balign 128
+/* File: x86/instruction_end.S */
+
     .global SYMBOL(artMterpAsmInstructionEnd)
 SYMBOL(artMterpAsmInstructionEnd):
 
+
 /*
  * ===========================================================================
  *  Sister implementations
  * ===========================================================================
  */
+/* File: x86/instruction_start_sister.S */
+
     .global SYMBOL(artMterpAsmSisterStart)
     .text
     .balign 4
 SYMBOL(artMterpAsmSisterStart):
+
+/* File: x86/instruction_end_sister.S */
+
     .global SYMBOL(artMterpAsmSisterEnd)
 SYMBOL(artMterpAsmSisterEnd):
 
+/* File: x86/instruction_start_alt.S */
 
     .global SYMBOL(artMterpAsmAltInstructionStart)
     .text
-
 SYMBOL(artMterpAsmAltInstructionStart) = .L_ALT_op_nop
+
 /* ------------------------------ */
     .balign 128
 .L_ALT_op_nop: /* 0x00 */
@@ -12635,8 +12645,11 @@
     jmp     .L_op_nop+(255*128)
 
     .balign 128
+/* File: x86/instruction_end_alt.S */
+
     .global SYMBOL(artMterpAsmAltInstructionEnd)
 SYMBOL(artMterpAsmAltInstructionEnd):
+
 /* File: x86/footer.S */
 /*
  * ===========================================================================
diff --git a/runtime/interpreter/mterp/out/mterp_x86_64.S b/runtime/interpreter/mterp/out/mterp_x86_64.S
index ea8f483..562cf7c 100644
--- a/runtime/interpreter/mterp/out/mterp_x86_64.S
+++ b/runtime/interpreter/mterp/out/mterp_x86_64.S
@@ -387,6 +387,7 @@
     GOTO_NEXT
     /* NOTE: no fallthrough */
 
+/* File: x86_64/instruction_start.S */
 
     .global SYMBOL(artMterpAsmInstructionStart)
 SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
@@ -6217,26 +6218,35 @@
 
 
     .balign 128
+/* File: x86_64/instruction_end.S */
+
     .global SYMBOL(artMterpAsmInstructionEnd)
 SYMBOL(artMterpAsmInstructionEnd):
 
+
 /*
  * ===========================================================================
  *  Sister implementations
  * ===========================================================================
  */
+/* File: x86_64/instruction_start_sister.S */
+
     .global SYMBOL(artMterpAsmSisterStart)
     .text
     .balign 4
 SYMBOL(artMterpAsmSisterStart):
+
+/* File: x86_64/instruction_end_sister.S */
+
     .global SYMBOL(artMterpAsmSisterEnd)
 SYMBOL(artMterpAsmSisterEnd):
 
+/* File: x86_64/instruction_start_alt.S */
 
     .global SYMBOL(artMterpAsmAltInstructionStart)
     .text
-
 SYMBOL(artMterpAsmAltInstructionStart) = .L_ALT_op_nop
+
 /* ------------------------------ */
     .balign 128
 .L_ALT_op_nop: /* 0x00 */
@@ -11870,8 +11880,11 @@
     jmp     .L_op_nop+(255*128)
 
     .balign 128
+/* File: x86_64/instruction_end_alt.S */
+
     .global SYMBOL(artMterpAsmAltInstructionEnd)
 SYMBOL(artMterpAsmAltInstructionEnd):
+
 /* File: x86_64/footer.S */
 /*
  * ===========================================================================
diff --git a/runtime/interpreter/mterp/x86/instruction_end.S b/runtime/interpreter/mterp/x86/instruction_end.S
new file mode 100644
index 0000000..3a02a21
--- /dev/null
+++ b/runtime/interpreter/mterp/x86/instruction_end.S
@@ -0,0 +1,3 @@
+
+    .global SYMBOL(artMterpAsmInstructionEnd)
+SYMBOL(artMterpAsmInstructionEnd):
diff --git a/runtime/interpreter/mterp/x86/instruction_end_alt.S b/runtime/interpreter/mterp/x86/instruction_end_alt.S
new file mode 100644
index 0000000..33c2b8e
--- /dev/null
+++ b/runtime/interpreter/mterp/x86/instruction_end_alt.S
@@ -0,0 +1,3 @@
+
+    .global SYMBOL(artMterpAsmAltInstructionEnd)
+SYMBOL(artMterpAsmAltInstructionEnd):
diff --git a/runtime/interpreter/mterp/x86/instruction_end_sister.S b/runtime/interpreter/mterp/x86/instruction_end_sister.S
new file mode 100644
index 0000000..ea14b11
--- /dev/null
+++ b/runtime/interpreter/mterp/x86/instruction_end_sister.S
@@ -0,0 +1,3 @@
+
+    .global SYMBOL(artMterpAsmSisterEnd)
+SYMBOL(artMterpAsmSisterEnd):
diff --git a/runtime/interpreter/mterp/x86/instruction_start.S b/runtime/interpreter/mterp/x86/instruction_start.S
new file mode 100644
index 0000000..ca711de
--- /dev/null
+++ b/runtime/interpreter/mterp/x86/instruction_start.S
@@ -0,0 +1,4 @@
+
+    .global SYMBOL(artMterpAsmInstructionStart)
+SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/x86/instruction_start_alt.S b/runtime/interpreter/mterp/x86/instruction_start_alt.S
new file mode 100644
index 0000000..9272a6a
--- /dev/null
+++ b/runtime/interpreter/mterp/x86/instruction_start_alt.S
@@ -0,0 +1,4 @@
+
+    .global SYMBOL(artMterpAsmAltInstructionStart)
+    .text
+SYMBOL(artMterpAsmAltInstructionStart) = .L_ALT_op_nop
diff --git a/runtime/interpreter/mterp/x86/instruction_start_sister.S b/runtime/interpreter/mterp/x86/instruction_start_sister.S
new file mode 100644
index 0000000..b9ac994
--- /dev/null
+++ b/runtime/interpreter/mterp/x86/instruction_start_sister.S
@@ -0,0 +1,5 @@
+
+    .global SYMBOL(artMterpAsmSisterStart)
+    .text
+    .balign 4
+SYMBOL(artMterpAsmSisterStart):
diff --git a/runtime/interpreter/mterp/x86_64/instruction_end.S b/runtime/interpreter/mterp/x86_64/instruction_end.S
new file mode 100644
index 0000000..3a02a21
--- /dev/null
+++ b/runtime/interpreter/mterp/x86_64/instruction_end.S
@@ -0,0 +1,3 @@
+
+    .global SYMBOL(artMterpAsmInstructionEnd)
+SYMBOL(artMterpAsmInstructionEnd):
diff --git a/runtime/interpreter/mterp/x86_64/instruction_end_alt.S b/runtime/interpreter/mterp/x86_64/instruction_end_alt.S
new file mode 100644
index 0000000..33c2b8e
--- /dev/null
+++ b/runtime/interpreter/mterp/x86_64/instruction_end_alt.S
@@ -0,0 +1,3 @@
+
+    .global SYMBOL(artMterpAsmAltInstructionEnd)
+SYMBOL(artMterpAsmAltInstructionEnd):
diff --git a/runtime/interpreter/mterp/x86_64/instruction_end_sister.S b/runtime/interpreter/mterp/x86_64/instruction_end_sister.S
new file mode 100644
index 0000000..ea14b11
--- /dev/null
+++ b/runtime/interpreter/mterp/x86_64/instruction_end_sister.S
@@ -0,0 +1,3 @@
+
+    .global SYMBOL(artMterpAsmSisterEnd)
+SYMBOL(artMterpAsmSisterEnd):
diff --git a/runtime/interpreter/mterp/x86_64/instruction_start.S b/runtime/interpreter/mterp/x86_64/instruction_start.S
new file mode 100644
index 0000000..ca711de
--- /dev/null
+++ b/runtime/interpreter/mterp/x86_64/instruction_start.S
@@ -0,0 +1,4 @@
+
+    .global SYMBOL(artMterpAsmInstructionStart)
+SYMBOL(artMterpAsmInstructionStart) = .L_op_nop
+    .text
diff --git a/runtime/interpreter/mterp/x86_64/instruction_start_alt.S b/runtime/interpreter/mterp/x86_64/instruction_start_alt.S
new file mode 100644
index 0000000..9272a6a
--- /dev/null
+++ b/runtime/interpreter/mterp/x86_64/instruction_start_alt.S
@@ -0,0 +1,4 @@
+
+    .global SYMBOL(artMterpAsmAltInstructionStart)
+    .text
+SYMBOL(artMterpAsmAltInstructionStart) = .L_ALT_op_nop
diff --git a/runtime/interpreter/mterp/x86_64/instruction_start_sister.S b/runtime/interpreter/mterp/x86_64/instruction_start_sister.S
new file mode 100644
index 0000000..b9ac994
--- /dev/null
+++ b/runtime/interpreter/mterp/x86_64/instruction_start_sister.S
@@ -0,0 +1,5 @@
+
+    .global SYMBOL(artMterpAsmSisterStart)
+    .text
+    .balign 4
+SYMBOL(artMterpAsmSisterStart):
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 8bc7a81..fffd7f3 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -145,6 +145,7 @@
                                        GetDirectMethodsStartOffset(),
                                        GetCopiedMethodsStartOffset());
 }
+
 template<VerifyObjectFlags kVerifyFlags>
 inline ArraySlice<ArtMethod> Class::GetDeclaredVirtualMethodsSlice(PointerSize pointer_size) {
   DCHECK(IsLoaded() || IsErroneous());
@@ -281,8 +282,7 @@
   return &GetVirtualMethodsSliceUnchecked(pointer_size)[i];
 }
 
-template<VerifyObjectFlags kVerifyFlags,
-         ReadBarrierOption kReadBarrierOption>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline PointerArray* Class::GetVTable() {
   DCHECK(IsLoaded<kVerifyFlags>() || IsErroneous<kVerifyFlags>());
   return GetFieldObject<PointerArray, kVerifyFlags, kReadBarrierOption>(
@@ -302,8 +302,7 @@
   return GetVTable() != nullptr || ShouldHaveEmbeddedVTable();
 }
 
-  template<VerifyObjectFlags kVerifyFlags,
-           ReadBarrierOption kReadBarrierOption>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline int32_t Class::GetVTableLength() {
   if (ShouldHaveEmbeddedVTable<kVerifyFlags, kReadBarrierOption>()) {
     return GetEmbeddedVTableLength();
@@ -312,15 +311,15 @@
       GetVTable<kVerifyFlags, kReadBarrierOption>()->GetLength() : 0;
 }
 
-  template<VerifyObjectFlags kVerifyFlags,
-           ReadBarrierOption kReadBarrierOption>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline ArtMethod* Class::GetVTableEntry(uint32_t i, PointerSize pointer_size) {
   if (ShouldHaveEmbeddedVTable<kVerifyFlags, kReadBarrierOption>()) {
     return GetEmbeddedVTableEntry(i, pointer_size);
   }
   auto* vtable = GetVTable<kVerifyFlags, kReadBarrierOption>();
   DCHECK(vtable != nullptr);
-  return vtable->template GetElementPtrSize<ArtMethod*, kVerifyFlags, kReadBarrierOption>(i, pointer_size);
+  return vtable->template GetElementPtrSize<ArtMethod*, kVerifyFlags, kReadBarrierOption>(
+      i, pointer_size);
 }
 
 inline int32_t Class::GetEmbeddedVTableLength() {
@@ -410,7 +409,7 @@
 //   Object[]         = int[] --> false
 //
 inline bool Class::IsArrayAssignableFromArray(ObjPtr<Class> src) {
-  DCHECK(IsArrayClass())  << PrettyClass();
+  DCHECK(IsArrayClass()) << PrettyClass();
   DCHECK(src->IsArrayClass()) << src->PrettyClass();
   return GetComponentType()->IsAssignableFrom(src->GetComponentType());
 }
@@ -622,16 +621,14 @@
   return FindVirtualMethodForVirtual(method, pointer_size);
 }
 
-template<VerifyObjectFlags kVerifyFlags,
-         ReadBarrierOption kReadBarrierOption>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline IfTable* Class::GetIfTable() {
   ObjPtr<IfTable> ret = GetFieldObject<IfTable, kVerifyFlags, kReadBarrierOption>(IfTableOffset());
   DCHECK(ret != nullptr) << PrettyClass(this);
   return ret.Ptr();
 }
 
-template<VerifyObjectFlags kVerifyFlags,
-         ReadBarrierOption kReadBarrierOption>
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline int32_t Class::GetIfTableCount() {
   return GetIfTable<kVerifyFlags, kReadBarrierOption>()->Count();
 }
@@ -734,7 +731,7 @@
 }
 
 inline void Class::SetName(ObjPtr<String> name) {
-    SetFieldObjectTransaction(OFFSET_OF_OBJECT_MEMBER(Class, name_), name);
+  SetFieldObjectTransaction(OFFSET_OF_OBJECT_MEMBER(Class, name_), name);
 }
 
 template<VerifyObjectFlags kVerifyFlags>
@@ -887,8 +884,8 @@
 inline void Class::AssertInitializedOrInitializingInThread(Thread* self) {
   if (kIsDebugBuild && !IsInitialized()) {
     CHECK(IsInitializing()) << PrettyClass() << " is not initializing: " << GetStatus();
-    CHECK_EQ(GetClinitThreadId(), self->GetTid()) << PrettyClass()
-                                                  << " is initializing in a different thread";
+    CHECK_EQ(GetClinitThreadId(), self->GetTid())
+        << PrettyClass() << " is initializing in a different thread";
   }
 }
 
@@ -964,18 +961,15 @@
   return GetDirectMethodsSliceUnchecked(pointer_size);
 }
 
-inline ArraySlice<ArtMethod> Class::GetDeclaredMethods(
-      PointerSize pointer_size) {
+inline ArraySlice<ArtMethod> Class::GetDeclaredMethods(PointerSize pointer_size) {
   return GetDeclaredMethodsSliceUnchecked(pointer_size);
 }
 
-inline ArraySlice<ArtMethod> Class::GetDeclaredVirtualMethods(
-      PointerSize pointer_size) {
+inline ArraySlice<ArtMethod> Class::GetDeclaredVirtualMethods(PointerSize pointer_size) {
   return GetDeclaredVirtualMethodsSliceUnchecked(pointer_size);
 }
 
-inline ArraySlice<ArtMethod> Class::GetVirtualMethods(
-    PointerSize pointer_size) {
+inline ArraySlice<ArtMethod> Class::GetVirtualMethods(PointerSize pointer_size) {
   CheckPointerSize(pointer_size);
   return GetVirtualMethodsSliceUnchecked(pointer_size);
 }
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 996461b..31a83f8 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -1233,7 +1233,7 @@
 
 uint32_t Class::Depth() {
   uint32_t depth = 0;
-  for (ObjPtr<Class> klass = this; klass->GetSuperClass() != nullptr; klass = klass->GetSuperClass()) {
+  for (ObjPtr<Class> cls = this; cls->GetSuperClass() != nullptr; cls = cls->GetSuperClass()) {
     depth++;
   }
   return depth;
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 7adb0d0..c3e167c 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -932,12 +932,10 @@
   ArtMethod* FindConstructor(const StringPiece& signature, PointerSize pointer_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  ArtMethod* FindDeclaredVirtualMethodByName(const StringPiece& name,
-                                             PointerSize pointer_size)
+  ArtMethod* FindDeclaredVirtualMethodByName(const StringPiece& name, PointerSize pointer_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  ArtMethod* FindDeclaredDirectMethodByName(const StringPiece& name,
-                                            PointerSize pointer_size)
+  ArtMethod* FindDeclaredDirectMethodByName(const StringPiece& name, PointerSize pointer_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   ArtMethod* FindClassInitializer(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -1181,10 +1179,7 @@
   void AssertInitializedOrInitializingInThread(Thread* self)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  Class* CopyOf(Thread* self,
-                int32_t new_length,
-                ImTable* imt,
-                PointerSize pointer_size)
+  Class* CopyOf(Thread* self, int32_t new_length, ImTable* imt, PointerSize pointer_size)
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
 
   // For proxy class only.
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index 2d10b97..e022db8 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -412,17 +412,21 @@
   return GetFieldByte<kVerifyFlags, true>(field_offset);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
   if (kTransactionActive) {
-    Runtime::Current()->RecordWriteFieldBoolean(this, field_offset,
-                                           GetFieldBoolean<kVerifyFlags, kIsVolatile>(field_offset),
-                                           kIsVolatile);
+    Runtime::Current()->RecordWriteFieldBoolean(
+        this,
+        field_offset,
+        GetFieldBoolean<kVerifyFlags, kIsVolatile>(field_offset),
+        kIsVolatile);
   }
   if (kVerifyFlags & kVerifyThis) {
     VerifyObject(this);
@@ -430,17 +434,20 @@
   SetField<uint8_t, kIsVolatile>(field_offset, new_value);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetFieldByte(MemberOffset field_offset, int8_t new_value)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
   if (kTransactionActive) {
-    Runtime::Current()->RecordWriteFieldByte(this, field_offset,
-                                           GetFieldByte<kVerifyFlags, kIsVolatile>(field_offset),
-                                           kIsVolatile);
+    Runtime::Current()->RecordWriteFieldByte(this,
+                                             field_offset,
+                                             GetFieldByte<kVerifyFlags, kIsVolatile>(field_offset),
+                                             kIsVolatile);
   }
   if (kVerifyFlags & kVerifyThis) {
     VerifyObject(this);
@@ -486,16 +493,19 @@
   return GetFieldShort<kVerifyFlags, true>(field_offset);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetFieldChar(MemberOffset field_offset, uint16_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
   if (kTransactionActive) {
-    Runtime::Current()->RecordWriteFieldChar(this, field_offset,
-                                           GetFieldChar<kVerifyFlags, kIsVolatile>(field_offset),
-                                           kIsVolatile);
+    Runtime::Current()->RecordWriteFieldChar(this,
+                                             field_offset,
+                                             GetFieldChar<kVerifyFlags, kIsVolatile>(field_offset),
+                                             kIsVolatile);
   }
   if (kVerifyFlags & kVerifyThis) {
     VerifyObject(this);
@@ -503,16 +513,19 @@
   SetField<uint16_t, kIsVolatile>(field_offset, new_value);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetFieldShort(MemberOffset field_offset, int16_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
   if (kTransactionActive) {
-    Runtime::Current()->RecordWriteFieldChar(this, field_offset,
-                                           GetFieldShort<kVerifyFlags, kIsVolatile>(field_offset),
-                                           kIsVolatile);
+    Runtime::Current()->RecordWriteFieldChar(this,
+                                             field_offset,
+                                             GetFieldShort<kVerifyFlags, kIsVolatile>(field_offset),
+                                             kIsVolatile);
   }
   if (kVerifyFlags & kVerifyThis) {
     VerifyObject(this);
@@ -532,14 +545,17 @@
       field_offset, new_value);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetField32(MemberOffset field_offset, int32_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
   if (kTransactionActive) {
-    Runtime::Current()->RecordWriteField32(this, field_offset,
+    Runtime::Current()->RecordWriteField32(this,
+                                           field_offset,
                                            GetField32<kVerifyFlags, kIsVolatile>(field_offset),
                                            kIsVolatile);
   }
@@ -567,7 +583,8 @@
 
 template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
 inline bool Object::CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
-                                                         int32_t old_value, int32_t new_value) {
+                                                         int32_t old_value,
+                                                         int32_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
@@ -585,7 +602,8 @@
 
 template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
 inline bool Object::CasFieldWeakAcquire32(MemberOffset field_offset,
-                                          int32_t old_value, int32_t new_value) {
+                                          int32_t old_value,
+                                          int32_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
@@ -603,7 +621,8 @@
 
 template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
 inline bool Object::CasFieldWeakRelease32(MemberOffset field_offset,
-                                          int32_t old_value, int32_t new_value) {
+                                          int32_t old_value,
+                                          int32_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
@@ -621,7 +640,8 @@
 
 template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
 inline bool Object::CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset,
-                                                           int32_t old_value, int32_t new_value) {
+                                                           int32_t old_value,
+                                                           int32_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
@@ -637,14 +657,17 @@
   return atomic_addr->CompareAndSetStrongSequentiallyConsistent(old_value, new_value);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetField64(MemberOffset field_offset, int64_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
   if (kTransactionActive) {
-    Runtime::Current()->RecordWriteField64(this, field_offset,
+    Runtime::Current()->RecordWriteField64(this,
+                                           field_offset,
                                            GetField64<kVerifyFlags, kIsVolatile>(field_offset),
                                            kIsVolatile);
   }
@@ -678,7 +701,8 @@
 
 template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
 inline bool Object::CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset,
-                                                         int64_t old_value, int64_t new_value) {
+                                                         int64_t old_value,
+                                                         int64_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
@@ -695,7 +719,8 @@
 
 template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
 inline bool Object::CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset,
-                                                           int64_t old_value, int64_t new_value) {
+                                                           int64_t old_value,
+                                                           int64_t new_value) {
   if (kCheckTransaction) {
     DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
   }
@@ -710,7 +735,9 @@
   return atomic_addr->CompareAndSetStrongSequentiallyConsistent(old_value, new_value);
 }
 
-template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption,
+template<class T,
+         VerifyObjectFlags kVerifyFlags,
+         ReadBarrierOption kReadBarrierOption,
          bool kIsVolatile>
 inline T* Object::GetFieldObject(MemberOffset field_offset) {
   if (kVerifyFlags & kVerifyThis) {
@@ -733,8 +760,10 @@
   return GetFieldObject<T, kVerifyFlags, kReadBarrierOption, true>(field_offset);
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                       ObjPtr<Object> new_value) {
   if (kCheckTransaction) {
@@ -760,8 +789,10 @@
   objref_addr->Assign<kIsVolatile>(new_value.Ptr());
 }
 
-template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
-    bool kIsVolatile>
+template<bool kTransactionActive,
+         bool kCheckTransaction,
+         VerifyObjectFlags kVerifyFlags,
+         bool kIsVolatile>
 inline void Object::SetFieldObject(MemberOffset field_offset, ObjPtr<Object> new_value) {
   SetFieldObjectWithoutWriteBarrier<kTransactionActive, kCheckTransaction, kVerifyFlags,
       kIsVolatile>(field_offset, new_value);
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 0e03e37..4240e70 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -271,7 +271,7 @@
     }
   }
   LOG(FATAL) << "Failed to find field for assignment to " << reinterpret_cast<void*>(this)
-      << " of type " << c->PrettyDescriptor() << " at offset " << field_offset;
+             << " of type " << c->PrettyDescriptor() << " at offset " << field_offset;
   UNREACHABLE();
 }
 
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index 82045c7..8584b8a 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -282,13 +282,16 @@
   bool IsPhantomReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
 
   // Accessor for Java type fields.
-  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
-      ReadBarrierOption kReadBarrierOption = kWithReadBarrier, bool kIsVolatile = false>
+  template<class T,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+           bool kIsVolatile = false>
   ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
-      ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  template<class T,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -310,11 +313,11 @@
   template<bool kTransactionActive,
            bool kCheckTransaction = true,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset,
-                                            ObjPtr<Object> new_value)
+  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset, ObjPtr<Object> new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kCheckTransaction = true, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+  template<bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            bool kIsVolatile = false>
   ALWAYS_INLINE void SetFieldObjectTransaction(MemberOffset field_offset, ObjPtr<Object> new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -416,23 +419,29 @@
   ALWAYS_INLINE int8_t GetFieldByteVolatile(MemberOffset field_offset)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           bool kIsVolatile = false>
   ALWAYS_INLINE void SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           bool kIsVolatile = false>
   ALWAYS_INLINE void SetFieldByte(MemberOffset field_offset, int8_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE void SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE void SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -452,23 +461,29 @@
   ALWAYS_INLINE int16_t GetFieldShortVolatile(MemberOffset field_offset)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           bool kIsVolatile = false>
   ALWAYS_INLINE void SetFieldChar(MemberOffset field_offset, uint16_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           bool kIsVolatile = false>
   ALWAYS_INLINE void SetFieldShort(MemberOffset field_offset, int16_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE void SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE void SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -487,13 +502,16 @@
     return GetField32<kVerifyFlags, true>(field_offset);
   }
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           bool kIsVolatile = false>
   ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -503,34 +521,44 @@
   ALWAYS_INLINE void SetField32Transaction(MemberOffset field_offset, int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
-                                                          int32_t old_value, int32_t new_value)
+                                                          int32_t old_value,
+                                                          int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  bool CasFieldWeakRelaxed32(MemberOffset field_offset, int32_t old_value,
-                             int32_t new_value) ALWAYS_INLINE
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  ALWAYS_INLINE bool CasFieldWeakRelaxed32(MemberOffset field_offset,
+                                           int32_t old_value,
+                                           int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  bool CasFieldWeakAcquire32(MemberOffset field_offset, int32_t old_value,
-                             int32_t new_value) ALWAYS_INLINE
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  ALWAYS_INLINE bool CasFieldWeakAcquire32(MemberOffset field_offset,
+                                           int32_t old_value,
+                                           int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  bool CasFieldWeakRelease32(MemberOffset field_offset, int32_t old_value,
-                             int32_t new_value) ALWAYS_INLINE
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  ALWAYS_INLINE bool CasFieldWeakRelease32(MemberOffset field_offset,
+                                           int32_t old_value,
+                                           int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
-                                              int32_t new_value) ALWAYS_INLINE
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  ALWAYS_INLINE bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset,
+                                                            int32_t old_value,
+                                                            int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
   template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
@@ -548,13 +576,16 @@
     return GetField64<kVerifyFlags, true>(field_offset);
   }
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           bool kIsVolatile = false>
   ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
@@ -564,35 +595,45 @@
   ALWAYS_INLINE void SetField64Transaction(MemberOffset field_offset, int32_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  bool CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  bool CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset,
+                                            int64_t old_value,
                                             int64_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
-  bool CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  bool CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset,
+                                              int64_t old_value,
                                               int64_t new_value)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           typename T>
   void SetFieldPtr(MemberOffset field_offset, T new_value)
       REQUIRES_SHARED(Locks::mutator_lock_) {
     SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
         field_offset, new_value, kRuntimePointerSize);
   }
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           typename T>
   void SetFieldPtr64(MemberOffset field_offset, T new_value)
       REQUIRES_SHARED(Locks::mutator_lock_) {
     SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
         field_offset, new_value, 8u);
   }
 
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           typename T>
   ALWAYS_INLINE void SetFieldPtrWithSize(MemberOffset field_offset,
                                          T new_value,
                                          PointerSize pointer_size)
@@ -628,28 +669,34 @@
   // Update methods that expose the raw address of a primitive value-type to an Accessor instance
   // that will attempt to update the field. These are used by VarHandle accessor methods to
   // atomically update fields with a wider range of memory orderings than usually required.
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   void UpdateFieldBooleanViaAccessor(MemberOffset field_offset, Accessor<uint8_t>* accessor)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   void UpdateFieldByteViaAccessor(MemberOffset field_offset, Accessor<int8_t>* accessor)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   void UpdateFieldCharViaAccessor(MemberOffset field_offset, Accessor<uint16_t>* accessor)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   void UpdateFieldShortViaAccessor(MemberOffset field_offset, Accessor<int16_t>* accessor)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   void UpdateField32ViaAccessor(MemberOffset field_offset, Accessor<int32_t>* accessor)
       REQUIRES_SHARED(Locks::mutator_lock_);
-  template<bool kTransactionActive, bool kCheckTransaction = true,
-      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
+  template<bool kTransactionActive,
+           bool kCheckTransaction = true,
+           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
   void UpdateField64ViaAccessor(MemberOffset field_offset, Accessor<int64_t>* accessor)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
diff --git a/runtime/oat.h b/runtime/oat.h
index 8069a15..e7e5848 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,8 +32,8 @@
 class PACKED(4) OatHeader {
  public:
   static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
-  // Last oat version changed reason: Optimize masks in stack maps.
-  static constexpr uint8_t kOatVersion[] = { '1', '4', '5', '\0' };
+  // Last oat version changed reason: Rewrite dex register map encoding.
+  static constexpr uint8_t kOatVersion[] = { '1', '4', '6', '\0' };
 
   static constexpr const char* kImageLocationKey = "image-location";
   static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
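
The bump from '145' to '146' above invalidates previously compiled oat files, whose stack maps still use the old dex register map encoding. A minimal sketch of the kind of byte-wise version comparison this relies on (not the actual OatHeader validation code, just an illustration of why the string must change):

    #include <cstring>
    #include <cstdint>

    // Hedged sketch: a runtime expecting version "146" rejects headers that
    // still carry "145", because the two encodings are not interchangeable.
    bool VersionMatches(const uint8_t actual[4]) {
      static constexpr uint8_t kExpected[] = { '1', '4', '6', '\0' };
      return std::memcmp(actual, kExpected, sizeof(kExpected)) == 0;
    }
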
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index 63a09f2..4f4abf7 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -232,7 +232,7 @@
   DCHECK(catch_stack_map.IsValid());
   DexRegisterMap catch_vreg_map =
       code_info.GetDexRegisterMapOf(catch_stack_map, number_of_vregs);
-  if (!catch_vreg_map.IsValid()) {
+  if (!catch_vreg_map.IsValid() || !catch_vreg_map.HasAnyLiveDexRegisters()) {
     return;
   }
 
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 61fe2e7..923bb35 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -16,6 +16,7 @@
 
 #include "stack_map.h"
 
+#include <iomanip>
 #include <stdint.h>
 
 #include "art_method.h"
@@ -24,149 +25,102 @@
 
 namespace art {
 
-constexpr size_t DexRegisterLocationCatalog::kNoLocationEntryIndex;
-
-std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation::Kind& kind) {
+std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg) {
   using Kind = DexRegisterLocation::Kind;
-  switch (kind) {
+  switch (reg.GetKind()) {
     case Kind::kNone:
-      return stream << "none";
+      return stream << "None";
     case Kind::kInStack:
-      return stream << "in stack";
+      return stream << "sp+" << reg.GetValue();
     case Kind::kInRegister:
-      return stream << "in register";
+      return stream << "r" << reg.GetValue();
     case Kind::kInRegisterHigh:
-      return stream << "in register high";
+      return stream << "r" << reg.GetValue() << "/hi";
     case Kind::kInFpuRegister:
-      return stream << "in fpu register";
+      return stream << "f" << reg.GetValue();
     case Kind::kInFpuRegisterHigh:
-      return stream << "in fpu register high";
+      return stream << "f" << reg.GetValue() << "/hi";
     case Kind::kConstant:
-      return stream << "as constant";
-    case Kind::kInStackLargeOffset:
-      return stream << "in stack (large offset)";
-    case Kind::kConstantLargeValue:
-      return stream << "as constant (large value)";
+      return stream << "#" << reg.GetValue();
+    default:
+      return stream << "DexRegisterLocation(" << static_cast<uint32_t>(reg.GetKind())
+                    << "," << reg.GetValue() << ")";
   }
-  return stream << "Kind<" << static_cast<uint32_t>(kind) << ">";
 }
 
-DexRegisterLocation::Kind DexRegisterMap::GetLocationInternalKind(
-    uint16_t dex_register_number) const {
-  DexRegisterLocationCatalog dex_register_location_catalog =
-      code_info_.GetDexRegisterLocationCatalog();
-  size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
-      dex_register_number,
-      code_info_.GetNumberOfLocationCatalogEntries());
-  return dex_register_location_catalog.GetLocationInternalKind(location_catalog_entry_index);
+static void DumpDexRegisterMap(VariableIndentationOutputStream* vios,
+                               const DexRegisterMap& map) {
+  if (map.IsValid()) {
+    ScopedIndentation indent1(vios);
+    for (size_t i = 0; i < map.size(); ++i) {
+      if (map.IsDexRegisterLive(i)) {
+        vios->Stream() << "v" << i << ":" << map.Get(i) << " ";
+      }
+    }
+    vios->Stream() << "\n";
+  }
 }
 
-DexRegisterLocation DexRegisterMap::GetDexRegisterLocation(uint16_t dex_register_number) const {
-  DexRegisterLocationCatalog dex_register_location_catalog =
-      code_info_.GetDexRegisterLocationCatalog();
-  size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
-      dex_register_number,
-      code_info_.GetNumberOfLocationCatalogEntries());
-  return dex_register_location_catalog.GetDexRegisterLocation(location_catalog_entry_index);
-}
-
-static void DumpRegisterMapping(std::ostream& os,
-                                size_t dex_register_num,
-                                DexRegisterLocation location,
-                                const std::string& prefix = "v",
-                                const std::string& suffix = "") {
-  os << prefix << dex_register_num << ": "
-     << location.GetInternalKind()
-     << " (" << location.GetValue() << ")" << suffix << '\n';
-}
-
-void StackMap::DumpEncoding(const BitTable<6>& table,
-                            VariableIndentationOutputStream* vios) {
-  vios->Stream()
-      << "StackMapEncoding"
-      << " (PackedNativePcBits=" << table.NumColumnBits(kPackedNativePc)
-      << ", DexPcBits=" << table.NumColumnBits(kDexPc)
-      << ", DexRegisterMapOffsetBits=" << table.NumColumnBits(kDexRegisterMapOffset)
-      << ", InlineInfoIndexBits=" << table.NumColumnBits(kInlineInfoIndex)
-      << ", RegisterMaskIndexBits=" << table.NumColumnBits(kRegisterMaskIndex)
-      << ", StackMaskIndexBits=" << table.NumColumnBits(kStackMaskIndex)
-      << ")\n";
-}
-
-void InlineInfo::DumpEncoding(const BitTable<5>& table,
-                              VariableIndentationOutputStream* vios) {
-  vios->Stream()
-      << "InlineInfoEncoding"
-      << " (IsLastBits=" << table.NumColumnBits(kIsLast)
-      << ", MethodIndexIdxBits=" << table.NumColumnBits(kMethodIndexIdx)
-      << ", DexPcBits=" << table.NumColumnBits(kDexPc)
-      << ", ExtraDataBits=" << table.NumColumnBits(kExtraData)
-      << ", DexRegisterMapOffsetBits=" << table.NumColumnBits(kDexRegisterMapOffset)
-      << ")\n";
+template<uint32_t kNumColumns>
+static void DumpTable(VariableIndentationOutputStream* vios,
+                      const char* table_name,
+                      const BitTable<kNumColumns>& table,
+                      bool verbose,
+                      bool is_mask = false) {
+  if (table.NumRows() != 0) {
+    vios->Stream() << table_name << " BitSize=" << table.NumRows() * table.NumRowBits();
+    vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
+    for (size_t c = 0; c < table.NumColumns(); c++) {
+      vios->Stream() << (c != 0 ? " " : "");
+      vios->Stream() << table.NumColumnBits(c);
+    }
+    vios->Stream() << "}\n";
+    if (verbose) {
+      ScopedIndentation indent1(vios);
+      for (size_t r = 0; r < table.NumRows(); r++) {
+        vios->Stream() << "[" << std::right << std::setw(3) << r << "]={";
+        for (size_t c = 0; c < table.NumColumns(); c++) {
+          vios->Stream() << (c != 0 ? " " : "");
+          if (is_mask) {
+            BitMemoryRegion bits = table.GetBitMemoryRegion(r, c);
+            for (size_t b = 0, e = bits.size_in_bits(); b < e; b++) {
+              vios->Stream() << bits.LoadBit(e - b - 1);
+            }
+          } else {
+            vios->Stream() << std::right << std::setw(8) << static_cast<int32_t>(table.Get(r, c));
+          }
+        }
+        vios->Stream() << "}\n";
+      }
+    }
+  }
 }
 
 void CodeInfo::Dump(VariableIndentationOutputStream* vios,
                     uint32_t code_offset,
-                    uint16_t number_of_dex_registers,
-                    bool dump_stack_maps,
+                    uint16_t num_dex_registers,
+                    bool verbose,
                     InstructionSet instruction_set,
                     const MethodInfo& method_info) const {
-  size_t number_of_stack_maps = GetNumberOfStackMaps();
   vios->Stream()
-      << "Optimized CodeInfo (number_of_dex_registers=" << number_of_dex_registers
-      << ", number_of_stack_maps=" << number_of_stack_maps
-      << ")\n";
+      << "CodeInfo"
+      << " BitSize="  << size_ * kBitsPerByte
+      << "\n";
   ScopedIndentation indent1(vios);
-  StackMap::DumpEncoding(stack_maps_, vios);
-  if (HasInlineInfo()) {
-    InlineInfo::DumpEncoding(inline_infos_, vios);
-  }
-  // Display the Dex register location catalog.
-  GetDexRegisterLocationCatalog().Dump(vios, *this);
+  DumpTable(vios, "StackMaps", stack_maps_, verbose);
+  DumpTable(vios, "RegisterMasks", register_masks_, verbose);
+  DumpTable(vios, "StackMasks", stack_masks_, verbose, true /* is_mask */);
+  DumpTable(vios, "InvokeInfos", invoke_infos_, verbose);
+  DumpTable(vios, "InlineInfos", inline_infos_, verbose);
+  DumpTable(vios, "DexRegisterMasks", dex_register_masks_, verbose, true /* is_mask */);
+  DumpTable(vios, "DexRegisterMaps", dex_register_maps_, verbose);
+  DumpTable(vios, "DexRegisterCatalog", dex_register_catalog_, verbose);
+
   // Display stack maps along with (live) Dex register maps.
-  if (dump_stack_maps) {
-    for (size_t i = 0; i < number_of_stack_maps; ++i) {
+  if (verbose) {
+    for (size_t i = 0; i < GetNumberOfStackMaps(); ++i) {
       StackMap stack_map = GetStackMapAt(i);
-      stack_map.Dump(vios,
-                     *this,
-                     method_info,
-                     code_offset,
-                     number_of_dex_registers,
-                     instruction_set,
-                     " " + std::to_string(i));
-    }
-  }
-  // TODO: Dump the stack map's inline information? We need to know more from the caller:
-  //       we need to know the number of dex registers for each inlined method.
-}
-
-void DexRegisterLocationCatalog::Dump(VariableIndentationOutputStream* vios,
-                                      const CodeInfo& code_info) {
-  size_t number_of_location_catalog_entries = code_info.GetNumberOfLocationCatalogEntries();
-  size_t location_catalog_size_in_bytes = code_info.GetDexRegisterLocationCatalogSize();
-  vios->Stream()
-      << "DexRegisterLocationCatalog (number_of_entries=" << number_of_location_catalog_entries
-      << ", size_in_bytes=" << location_catalog_size_in_bytes << ")\n";
-  for (size_t i = 0; i < number_of_location_catalog_entries; ++i) {
-    DexRegisterLocation location = GetDexRegisterLocation(i);
-    ScopedIndentation indent1(vios);
-    DumpRegisterMapping(vios->Stream(), i, location, "entry ");
-  }
-}
-
-void DexRegisterMap::Dump(VariableIndentationOutputStream* vios) const {
-  size_t number_of_location_catalog_entries = code_info_.GetNumberOfLocationCatalogEntries();
-  // TODO: Display the bit mask of live Dex registers.
-  for (size_t j = 0; j < number_of_dex_registers_; ++j) {
-    if (IsDexRegisterLive(j)) {
-      size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
-          j,
-          number_of_location_catalog_entries);
-      DexRegisterLocation location = GetDexRegisterLocation(j);
-      ScopedIndentation indent1(vios);
-      DumpRegisterMapping(
-          vios->Stream(), j, location, "v",
-          "\t[entry " + std::to_string(static_cast<int>(location_catalog_entry_index)) + "]");
+      stack_map.Dump(vios, *this, method_info, code_offset, num_dex_registers, instruction_set);
     }
   }
 }
@@ -176,17 +130,13 @@
                     const MethodInfo& method_info,
                     uint32_t code_offset,
                     uint16_t number_of_dex_registers,
-                    InstructionSet instruction_set,
-                    const std::string& header_suffix) const {
+                    InstructionSet instruction_set) const {
   const uint32_t pc_offset = GetNativePcOffset(instruction_set);
   vios->Stream()
-      << "StackMap" << header_suffix
+      << "StackMap[" << Row() << "]"
       << std::hex
-      << " [native_pc=0x" << code_offset + pc_offset << "]"
-      << " (dex_pc=0x" << GetDexPc()
-      << ", native_pc_offset=0x" << pc_offset
-      << ", dex_register_map_offset=0x" << GetDexRegisterMapOffset()
-      << ", inline_info_offset=0x" << GetInlineInfoIndex()
+      << " (native_pc=0x" << code_offset + pc_offset
+      << ", dex_pc=0x" << GetDexPc()
       << ", register_mask=0x" << code_info.GetRegisterMaskOf(*this)
       << std::dec
       << ", stack_mask=0b";
@@ -195,11 +145,7 @@
     vios->Stream() << stack_mask.LoadBit(e - i - 1);
   }
   vios->Stream() << ")\n";
-  if (HasDexRegisterMap()) {
-    DexRegisterMap dex_register_map = code_info.GetDexRegisterMapOf(
-        *this, number_of_dex_registers);
-    dex_register_map.Dump(vios);
-  }
+  DumpDexRegisterMap(vios, code_info.GetDexRegisterMapOf(*this, number_of_dex_registers));
   if (HasInlineInfo()) {
     InlineInfo inline_info = code_info.GetInlineInfoOf(*this);
     // We do not know the length of the dex register maps of inlined frames
@@ -213,15 +159,12 @@
                       const CodeInfo& code_info,
                       const MethodInfo& method_info,
                       uint16_t number_of_dex_registers[]) const {
-  vios->Stream() << "InlineInfo with depth "
-                 << static_cast<uint32_t>(GetDepth())
-                 << "\n";
-
   for (size_t i = 0; i < GetDepth(); ++i) {
     vios->Stream()
-        << " At depth " << i
+        << "InlineInfo[" << Row() + i << "]"
+        << " (depth=" << i
         << std::hex
-        << " (dex_pc=0x" << GetDexPcAtDepth(i);
+        << ", dex_pc=0x" << GetDexPcAtDepth(i);
     if (EncodesArtMethodAtDepth(i)) {
       ScopedObjectAccess soa(Thread::Current());
       vios->Stream() << ", method=" << GetArtMethodAtDepth(i)->PrettyMethod();
@@ -231,11 +174,9 @@
           << ", method_index=" << GetMethodIndexAtDepth(method_info, i);
     }
     vios->Stream() << ")\n";
-    if (HasDexRegisterMapAtDepth(i) && (number_of_dex_registers != nullptr)) {
-      DexRegisterMap dex_register_map =
-          code_info.GetDexRegisterMapAtDepth(i, *this, number_of_dex_registers[i]);
-      ScopedIndentation indent1(vios);
-      dex_register_map.Dump(vios);
+    if (number_of_dex_registers != nullptr) {
+      uint16_t vregs = number_of_dex_registers[i];
+      DumpDexRegisterMap(vios, code_info.GetDexRegisterMapAtDepth(i, *this, vregs));
     }
   }
 }
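
For reference, the rewritten operator<< above emits one compact token per location, so a dumped register map line now reads like "v0:sp+16 v1:r5 v3:#42" instead of the old verbose "in stack (16)" form. A small illustration, assuming the DexRegisterLocation(Kind, value) constructor keeps the shape it had in the old stack_map.h removed below:

    #include <sstream>

    std::ostringstream oss;
    oss << DexRegisterLocation(DexRegisterLocation::Kind::kInStack, 16) << " "    // "sp+16"
        << DexRegisterLocation(DexRegisterLocation::Kind::kInRegister, 5) << " "  // "r5"
        << DexRegisterLocation(DexRegisterLocation::Kind::kConstant, 42);         // "#42"
    // oss.str() == "sp+16 r5 #42"
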
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 9d66b31..9aac204 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -26,6 +26,7 @@
 #include "base/leb128.h"
 #include "base/memory_region.h"
 #include "dex/dex_file_types.h"
+#include "dex_register_location.h"
 #include "method_info.h"
 #include "oat_quick_method_header.h"
 
@@ -41,522 +42,76 @@
 class ArtMethod;
 class CodeInfo;
 
-/**
- * Classes in the following file are wrapper on stack map information backed
- * by a MemoryRegion. As such they read and write to the region, they don't have
- * their own fields.
- */
+std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);
 
-// Dex register location container used by DexRegisterMap and StackMapStream.
-class DexRegisterLocation {
- public:
-  /*
-   * The location kind used to populate the Dex register information in a
-   * StackMapStream can either be:
-   * - kStack: vreg stored on the stack, value holds the stack offset;
-   * - kInRegister: vreg stored in low 32 bits of a core physical register,
-   *                value holds the register number;
-   * - kInRegisterHigh: vreg stored in high 32 bits of a core physical register,
-   *                    value holds the register number;
-   * - kInFpuRegister: vreg stored in low 32 bits of an FPU register,
-   *                   value holds the register number;
-   * - kInFpuRegisterHigh: vreg stored in high 32 bits of an FPU register,
-   *                       value holds the register number;
-   * - kConstant: value holds the constant;
-   *
-   * In addition, DexRegisterMap also uses these values:
-   * - kInStackLargeOffset: value holds a "large" stack offset (greater than
-   *   or equal to 128 bytes);
-   * - kConstantLargeValue: value holds a "large" constant (lower than 0, or
-   *   or greater than or equal to 32);
-   * - kNone: the register has no location, meaning it has not been set.
-   */
-  enum class Kind : uint8_t {
-    // Short location kinds, for entries fitting on one byte (3 bits
-    // for the kind, 5 bits for the value) in a DexRegisterMap.
-    kInStack = 0,             // 0b000
-    kInRegister = 1,          // 0b001
-    kInRegisterHigh = 2,      // 0b010
-    kInFpuRegister = 3,       // 0b011
-    kInFpuRegisterHigh = 4,   // 0b100
-    kConstant = 5,            // 0b101
-
-    // Large location kinds, requiring a 5-byte encoding (1 byte for the
-    // kind, 4 bytes for the value).
-
-    // Stack location at a large offset, meaning that the offset value
-    // divided by the stack frame slot size (4 bytes) cannot fit on a
-    // 5-bit unsigned integer (i.e., this offset value is greater than
-    // or equal to 2^5 * 4 = 128 bytes).
-    kInStackLargeOffset = 6,  // 0b110
-
-    // Large constant, that cannot fit on a 5-bit signed integer (i.e.,
-    // lower than 0, or greater than or equal to 2^5 = 32).
-    kConstantLargeValue = 7,  // 0b111
-
-    // Entries with no location are not stored and do not need own marker.
-    kNone = static_cast<uint8_t>(-1),
-
-    kLastLocationKind = kConstantLargeValue
-  };
-
-  static_assert(
-      sizeof(Kind) == 1u,
-      "art::DexRegisterLocation::Kind has a size different from one byte.");
-
-  static bool IsShortLocationKind(Kind kind) {
-    switch (kind) {
-      case Kind::kInStack:
-      case Kind::kInRegister:
-      case Kind::kInRegisterHigh:
-      case Kind::kInFpuRegister:
-      case Kind::kInFpuRegisterHigh:
-      case Kind::kConstant:
-        return true;
-
-      case Kind::kInStackLargeOffset:
-      case Kind::kConstantLargeValue:
-        return false;
-
-      case Kind::kNone:
-        LOG(FATAL) << "Unexpected location kind";
-    }
-    UNREACHABLE();
-  }
-
-  // Convert `kind` to a "surface" kind, i.e. one that doesn't include
-  // any value with a "large" qualifier.
-  // TODO: Introduce another enum type for the surface kind?
-  static Kind ConvertToSurfaceKind(Kind kind) {
-    switch (kind) {
-      case Kind::kInStack:
-      case Kind::kInRegister:
-      case Kind::kInRegisterHigh:
-      case Kind::kInFpuRegister:
-      case Kind::kInFpuRegisterHigh:
-      case Kind::kConstant:
-        return kind;
-
-      case Kind::kInStackLargeOffset:
-        return Kind::kInStack;
-
-      case Kind::kConstantLargeValue:
-        return Kind::kConstant;
-
-      case Kind::kNone:
-        return kind;
-    }
-    UNREACHABLE();
-  }
-
-  // Required by art::StackMapStream::LocationCatalogEntriesIndices.
-  DexRegisterLocation() : kind_(Kind::kNone), value_(0) {}
-
-  DexRegisterLocation(Kind kind, int32_t value) : kind_(kind), value_(value) {}
-
-  static DexRegisterLocation None() {
-    return DexRegisterLocation(Kind::kNone, 0);
-  }
-
-  // Get the "surface" kind of the location, i.e., the one that doesn't
-  // include any value with a "large" qualifier.
-  Kind GetKind() const {
-    return ConvertToSurfaceKind(kind_);
-  }
-
-  // Get the value of the location.
-  int32_t GetValue() const { return value_; }
-
-  // Get the actual kind of the location.
-  Kind GetInternalKind() const { return kind_; }
-
-  bool operator==(DexRegisterLocation other) const {
-    return kind_ == other.kind_ && value_ == other.value_;
-  }
-
-  bool operator!=(DexRegisterLocation other) const {
-    return !(*this == other);
-  }
-
- private:
-  Kind kind_;
-  int32_t value_;
-
-  friend class DexRegisterLocationHashFn;
-};
-
-std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation::Kind& kind);
-
-/**
- * Store information on unique Dex register locations used in a method.
- * The information is of the form:
- *
- *   [DexRegisterLocation+].
- *
- * DexRegisterLocations are either 1- or 5-byte wide (see art::DexRegisterLocation::Kind).
- */
-class DexRegisterLocationCatalog {
- public:
-  explicit DexRegisterLocationCatalog(MemoryRegion region) : region_(region) {}
-
-  // Short (compressed) location, fitting on one byte.
-  typedef uint8_t ShortLocation;
-
-  void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
-    DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
-    int32_t value = dex_register_location.GetValue();
-    if (DexRegisterLocation::IsShortLocationKind(kind)) {
-      // Short location.  Compress the kind and the value as a single byte.
-      if (kind == DexRegisterLocation::Kind::kInStack) {
-        // Instead of storing stack offsets expressed in bytes for
-        // short stack locations, store slot offsets.  A stack offset
-        // is a multiple of 4 (kFrameSlotSize).  This means that by
-        // dividing it by 4, we can fit values from the [0, 128)
-        // interval in a short stack location, and not just values
-        // from the [0, 32) interval.
-        DCHECK_EQ(value % kFrameSlotSize, 0);
-        value /= kFrameSlotSize;
-      }
-      DCHECK(IsShortValue(value)) << value;
-      region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
-    } else {
-      // Large location.  Write the location on one byte and the value
-      // on 4 bytes.
-      DCHECK(!IsShortValue(value)) << value;
-      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
-        // Also divide large stack offsets by 4 for the sake of consistency.
-        DCHECK_EQ(value % kFrameSlotSize, 0);
-        value /= kFrameSlotSize;
-      }
-      // Data can be unaligned as the written Dex register locations can
-      // either be 1-byte or 5-byte wide.  Use
-      // art::MemoryRegion::StoreUnaligned instead of
-      // art::MemoryRegion::Store to prevent unligned word accesses on ARM.
-      region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
-      region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
-    }
-  }
-
-  // Find the offset of the location catalog entry number `location_catalog_entry_index`.
-  size_t FindLocationOffset(size_t location_catalog_entry_index) const {
-    size_t offset = kFixedSize;
-    // Skip the first `location_catalog_entry_index - 1` entries.
-    for (uint16_t i = 0; i < location_catalog_entry_index; ++i) {
-      // Read the first next byte and inspect its first 3 bits to decide
-      // whether it is a short or a large location.
-      DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
-      if (DexRegisterLocation::IsShortLocationKind(kind)) {
-        // Short location.  Skip the current byte.
-        offset += SingleShortEntrySize();
-      } else {
-        // Large location.  Skip the 5 next bytes.
-        offset += SingleLargeEntrySize();
-      }
-    }
-    return offset;
-  }
-
-  // Get the internal kind of entry at `location_catalog_entry_index`.
-  DexRegisterLocation::Kind GetLocationInternalKind(size_t location_catalog_entry_index) const {
-    if (location_catalog_entry_index == kNoLocationEntryIndex) {
-      return DexRegisterLocation::Kind::kNone;
-    }
-    return ExtractKindAtOffset(FindLocationOffset(location_catalog_entry_index));
-  }
-
-  // Get the (surface) kind and value of entry at `location_catalog_entry_index`.
-  DexRegisterLocation GetDexRegisterLocation(size_t location_catalog_entry_index) const {
-    if (location_catalog_entry_index == kNoLocationEntryIndex) {
-      return DexRegisterLocation::None();
-    }
-    size_t offset = FindLocationOffset(location_catalog_entry_index);
-    // Read the first byte and inspect its first 3 bits to get the location.
-    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
-    DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
-    if (DexRegisterLocation::IsShortLocationKind(kind)) {
-      // Short location.  Extract the value from the remaining 5 bits.
-      int32_t value = ExtractValueFromShortLocation(first_byte);
-      if (kind == DexRegisterLocation::Kind::kInStack) {
-        // Convert the stack slot (short) offset to a byte offset value.
-        value *= kFrameSlotSize;
-      }
-      return DexRegisterLocation(kind, value);
-    } else {
-      // Large location.  Read the four next bytes to get the value.
-      int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
-      if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
-        // Convert the stack slot (large) offset to a byte offset value.
-        value *= kFrameSlotSize;
-      }
-      return DexRegisterLocation(kind, value);
-    }
-  }
-
-  // Compute the compressed kind of `location`.
-  static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
-    DexRegisterLocation::Kind kind = location.GetInternalKind();
-    switch (kind) {
-      case DexRegisterLocation::Kind::kInStack:
-        return IsShortStackOffsetValue(location.GetValue())
-            ? DexRegisterLocation::Kind::kInStack
-            : DexRegisterLocation::Kind::kInStackLargeOffset;
-
-      case DexRegisterLocation::Kind::kInRegister:
-      case DexRegisterLocation::Kind::kInRegisterHigh:
-        DCHECK_GE(location.GetValue(), 0);
-        DCHECK_LT(location.GetValue(), 1 << kValueBits);
-        return kind;
-
-      case DexRegisterLocation::Kind::kInFpuRegister:
-      case DexRegisterLocation::Kind::kInFpuRegisterHigh:
-        DCHECK_GE(location.GetValue(), 0);
-        DCHECK_LT(location.GetValue(), 1 << kValueBits);
-        return kind;
-
-      case DexRegisterLocation::Kind::kConstant:
-        return IsShortConstantValue(location.GetValue())
-            ? DexRegisterLocation::Kind::kConstant
-            : DexRegisterLocation::Kind::kConstantLargeValue;
-
-      case DexRegisterLocation::Kind::kConstantLargeValue:
-      case DexRegisterLocation::Kind::kInStackLargeOffset:
-      case DexRegisterLocation::Kind::kNone:
-        LOG(FATAL) << "Unexpected location kind " << kind;
-    }
-    UNREACHABLE();
-  }
-
-  // Can `location` be turned into a short location?
-  static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
-    DexRegisterLocation::Kind kind = location.GetInternalKind();
-    switch (kind) {
-      case DexRegisterLocation::Kind::kInStack:
-        return IsShortStackOffsetValue(location.GetValue());
-
-      case DexRegisterLocation::Kind::kInRegister:
-      case DexRegisterLocation::Kind::kInRegisterHigh:
-      case DexRegisterLocation::Kind::kInFpuRegister:
-      case DexRegisterLocation::Kind::kInFpuRegisterHigh:
-        return true;
-
-      case DexRegisterLocation::Kind::kConstant:
-        return IsShortConstantValue(location.GetValue());
-
-      case DexRegisterLocation::Kind::kConstantLargeValue:
-      case DexRegisterLocation::Kind::kInStackLargeOffset:
-      case DexRegisterLocation::Kind::kNone:
-        LOG(FATAL) << "Unexpected location kind " << kind;
-    }
-    UNREACHABLE();
-  }
-
-  static size_t EntrySize(const DexRegisterLocation& location) {
-    return CanBeEncodedAsShortLocation(location) ? SingleShortEntrySize() : SingleLargeEntrySize();
-  }
-
-  static size_t SingleShortEntrySize() {
-    return sizeof(ShortLocation);
-  }
-
-  static size_t SingleLargeEntrySize() {
-    return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
-  }
-
-  size_t Size() const {
-    return region_.size();
-  }
-
-  void Dump(VariableIndentationOutputStream* vios,
-            const CodeInfo& code_info);
-
-  // Special (invalid) Dex register location catalog entry index meaning
-  // that there is no location for a given Dex register (i.e., it is
-  // mapped to a DexRegisterLocation::Kind::kNone location).
-  static constexpr size_t kNoLocationEntryIndex = -1;
-
- private:
-  static constexpr int kFixedSize = 0;
-
-  // Width of the kind "field" in a short location, in bits.
-  static constexpr size_t kKindBits = 3;
-  // Width of the value "field" in a short location, in bits.
-  static constexpr size_t kValueBits = 5;
-
-  static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
-  static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
-  static constexpr size_t kKindOffset = 0;
-  static constexpr size_t kValueOffset = kKindBits;
-
-  static bool IsShortStackOffsetValue(int32_t value) {
-    DCHECK_EQ(value % kFrameSlotSize, 0);
-    return IsShortValue(value / kFrameSlotSize);
-  }
-
-  static bool IsShortConstantValue(int32_t value) {
-    return IsShortValue(value);
-  }
-
-  static bool IsShortValue(int32_t value) {
-    return IsUint<kValueBits>(value);
-  }
-
-  static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
-    uint8_t kind_integer_value = static_cast<uint8_t>(kind);
-    DCHECK(IsUint<kKindBits>(kind_integer_value)) << kind_integer_value;
-    DCHECK(IsShortValue(value)) << value;
-    return (kind_integer_value & kKindMask) << kKindOffset
-        | (value & kValueMask) << kValueOffset;
-  }
-
-  static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
-    uint8_t kind = (location >> kKindOffset) & kKindMask;
-    DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
-    // We do not encode kNone locations in the stack map.
-    DCHECK_NE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kNone));
-    return static_cast<DexRegisterLocation::Kind>(kind);
-  }
-
-  static int32_t ExtractValueFromShortLocation(ShortLocation location) {
-    return (location >> kValueOffset) & kValueMask;
-  }
-
-  // Extract a location kind from the byte at position `offset`.
-  DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
-    ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
-    return ExtractKindFromShortLocation(first_byte);
-  }
-
-  MemoryRegion region_;
-
-  friend class CodeInfo;
-  friend class StackMapStream;
-};
-
-/* Information on Dex register locations for a specific PC, mapping a
- * stack map's Dex register to a location entry in a DexRegisterLocationCatalog.
- * The information is of the form:
- *
- *   [live_bit_mask, entries*]
- *
- * where entries are concatenated unsigned integer values encoded on a number
- * of bits (fixed per DexRegisterMap instances of a CodeInfo object) depending
- * on the number of entries in the Dex register location catalog
- * (see DexRegisterMap::SingleEntrySizeInBits).  The map is 1-byte aligned.
- */
+// Information on Dex register locations for a specific PC.
+// Effectively just a convenience wrapper around a vector of DexRegisterLocations.
+// If the size is small enough, it keeps the data on the stack.
 class DexRegisterMap {
  public:
-  DexRegisterMap(MemoryRegion region, uint16_t number_of_dex_registers, const CodeInfo& code_info)
-      : region_(region),
-        number_of_dex_registers_(number_of_dex_registers),
-        code_info_(code_info) {}
-
-  bool IsValid() const { return region_.IsValid(); }
-
-  // Get the surface kind of Dex register `dex_register_number`.
-  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number) const {
-    return DexRegisterLocation::ConvertToSurfaceKind(GetLocationInternalKind(dex_register_number));
+  // Create map for given number of registers and initialize all locations to None.
+  explicit DexRegisterMap(size_t count) : count_(count), regs_small_{} {
+    if (count_ <= kSmallCount) {
+      std::fill_n(regs_small_.begin(), count, DexRegisterLocation::None());
+    } else {
+      regs_large_.resize(count, DexRegisterLocation::None());
+    }
   }
 
-  // Get the internal kind of Dex register `dex_register_number`.
-  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number) const;
+  DexRegisterLocation* data() {
+    return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
+  }
 
-  // Get the Dex register location `dex_register_number`.
-  DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number) const;
+  size_t size() const { return count_; }
+
+  bool IsValid() const { return count_ != 0; }
+
+  DexRegisterLocation Get(size_t index) const {
+    DCHECK_LT(index, count_);
+    return count_ <= kSmallCount ? regs_small_[index] : regs_large_[index];
+  }
+
+  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number) const {
+    return Get(dex_register_number).GetKind();
+  }
+
+  // TODO: Remove.
+  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number) const {
+    return Get(dex_register_number).GetKind();
+  }
+
+  DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number) const {
+    return Get(dex_register_number);
+  }
 
   int32_t GetStackOffsetInBytes(uint16_t dex_register_number) const {
-    DexRegisterLocation location = GetDexRegisterLocation(dex_register_number);
+    DexRegisterLocation location = Get(dex_register_number);
     DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
-    // GetDexRegisterLocation returns the offset in bytes.
     return location.GetValue();
   }
 
   int32_t GetConstant(uint16_t dex_register_number) const {
-    DexRegisterLocation location = GetDexRegisterLocation(dex_register_number);
-    DCHECK_EQ(location.GetKind(), DexRegisterLocation::Kind::kConstant);
+    DexRegisterLocation location = Get(dex_register_number);
+    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
     return location.GetValue();
   }
 
   int32_t GetMachineRegister(uint16_t dex_register_number) const {
-    DexRegisterLocation location = GetDexRegisterLocation(dex_register_number);
-    DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister ||
-           location.GetInternalKind() == DexRegisterLocation::Kind::kInRegisterHigh ||
-           location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister ||
-           location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegisterHigh)
-        << location.GetInternalKind();
+    DexRegisterLocation location = Get(dex_register_number);
+    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInRegister ||
+           location.GetKind() == DexRegisterLocation::Kind::kInRegisterHigh ||
+           location.GetKind() == DexRegisterLocation::Kind::kInFpuRegister ||
+           location.GetKind() == DexRegisterLocation::Kind::kInFpuRegisterHigh);
     return location.GetValue();
   }
 
-  // Get the index of the entry in the Dex register location catalog
-  // corresponding to `dex_register_number`.
-  size_t GetLocationCatalogEntryIndex(uint16_t dex_register_number,
-                                      size_t number_of_location_catalog_entries) const {
-    if (!IsDexRegisterLive(dex_register_number)) {
-      return DexRegisterLocationCatalog::kNoLocationEntryIndex;
-    }
-
-    if (number_of_location_catalog_entries == 1) {
-      // We do not allocate space for location maps in the case of a
-      // single-entry location catalog, as it is useless.  The only valid
-      // entry index is 0;
-      return 0;
-    }
-
-    // The bit offset of the beginning of the map locations.
-    size_t map_locations_offset_in_bits =
-        GetLocationMappingDataOffset(number_of_dex_registers_) * kBitsPerByte;
-    size_t index_in_dex_register_map = GetIndexInDexRegisterMap(dex_register_number);
-    DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters());
-    // The bit size of an entry.
-    size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
-    // The bit offset where `index_in_dex_register_map` is located.
-    size_t entry_offset_in_bits =
-        map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
-    size_t location_catalog_entry_index =
-        region_.LoadBits(entry_offset_in_bits, map_entry_size_in_bits);
-    DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
-    return location_catalog_entry_index;
-  }
-
-  // Map entry at `index_in_dex_register_map` to `location_catalog_entry_index`.
-  void SetLocationCatalogEntryIndex(size_t index_in_dex_register_map,
-                                    size_t location_catalog_entry_index,
-                                    size_t number_of_location_catalog_entries) {
-    DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters());
-    DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
-
-    if (number_of_location_catalog_entries == 1) {
-      // We do not allocate space for location maps in the case of a
-      // single-entry location catalog, as it is useless.
-      return;
-    }
-
-    // The bit offset of the beginning of the map locations.
-    size_t map_locations_offset_in_bits =
-        GetLocationMappingDataOffset(number_of_dex_registers_) * kBitsPerByte;
-    // The bit size of an entry.
-    size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
-    // The bit offset where `index_in_dex_register_map` is located.
-    size_t entry_offset_in_bits =
-        map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
-    region_.StoreBits(entry_offset_in_bits, location_catalog_entry_index, map_entry_size_in_bits);
-  }
-
-  void SetLiveBitMask(uint16_t number_of_dex_registers,
-                      const BitVector& live_dex_registers_mask) {
-    size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
-    for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
-      region_.StoreBit(live_bit_mask_offset_in_bits + i, live_dex_registers_mask.IsBitSet(i));
-    }
-  }
-
   ALWAYS_INLINE bool IsDexRegisterLive(uint16_t dex_register_number) const {
-    size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
-    return region_.LoadBit(live_bit_mask_offset_in_bits + dex_register_number);
+    return Get(dex_register_number).IsLive();
   }
 
-  size_t GetNumberOfLiveDexRegisters(uint16_t number_of_dex_registers) const {
+  size_t GetNumberOfLiveDexRegisters() const {
     size_t number_of_live_dex_registers = 0;
-    for (size_t i = 0; i < number_of_dex_registers; ++i) {
+    for (size_t i = 0; i < count_; ++i) {
       if (IsDexRegisterLive(i)) {
         ++number_of_live_dex_registers;
       }
@@ -564,74 +119,22 @@
     return number_of_live_dex_registers;
   }
 
-  size_t GetNumberOfLiveDexRegisters() const {
-    return GetNumberOfLiveDexRegisters(number_of_dex_registers_);
+  bool HasAnyLiveDexRegisters() const {
+    for (size_t i = 0; i < count_; ++i) {
+      if (IsDexRegisterLive(i)) {
+        return true;
+      }
+    }
+    return false;
   }
 
-  static size_t GetLiveBitMaskOffset() {
-    return kFixedSize;
-  }
-
-  // Compute the size of the live register bit mask (in bytes), for a
-  // method having `number_of_dex_registers` Dex registers.
-  static size_t GetLiveBitMaskSize(uint16_t number_of_dex_registers) {
-    return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
-  }
-
-  static size_t GetLocationMappingDataOffset(uint16_t number_of_dex_registers) {
-    return GetLiveBitMaskOffset() + GetLiveBitMaskSize(number_of_dex_registers);
-  }
-
-  size_t GetLocationMappingDataSize(size_t number_of_location_catalog_entries) const {
-    size_t location_mapping_data_size_in_bits =
-        GetNumberOfLiveDexRegisters()
-        * SingleEntrySizeInBits(number_of_location_catalog_entries);
-    return RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
-  }
-
-  // Return the size of a map entry in bits.  Note that if
-  // `number_of_location_catalog_entries` equals 1, this function returns 0,
-  // which is fine, as there is no need to allocate a map for a
-  // single-entry location catalog; the only valid location catalog entry index
-  // for a live register in this case is 0 and there is no need to
-  // store it.
-  static size_t SingleEntrySizeInBits(size_t number_of_location_catalog_entries) {
-    // Handle the case of 0, as we cannot pass 0 to art::WhichPowerOf2.
-    return number_of_location_catalog_entries == 0
-        ? 0u
-        : WhichPowerOf2(RoundUpToPowerOfTwo(number_of_location_catalog_entries));
-  }
-
-  // Return the size of the DexRegisterMap object, in bytes.
-  size_t Size() const {
-    return BitsToBytesRoundUp(region_.size_in_bits());
-  }
-
-  void Dump(VariableIndentationOutputStream* vios) const;
-
  private:
-  // Return the index in the Dex register map corresponding to the Dex
-  // register number `dex_register_number`.
-  size_t GetIndexInDexRegisterMap(uint16_t dex_register_number) const {
-    if (!IsDexRegisterLive(dex_register_number)) {
-      return kInvalidIndexInDexRegisterMap;
-    }
-    return GetNumberOfLiveDexRegisters(dex_register_number);
-  }
-
-  // Special (invalid) Dex register map entry index meaning that there
-  // is no index in the map for a given Dex register (i.e., it must
-  // have been mapped to a DexRegisterLocation::Kind::kNone location).
-  static constexpr size_t kInvalidIndexInDexRegisterMap = -1;
-
-  static constexpr int kFixedSize = 0;
-
-  BitMemoryRegion region_;
-  uint16_t number_of_dex_registers_;
-  const CodeInfo& code_info_;
-
-  friend class CodeInfo;
-  friend class StackMapStream;
+  // Store the data inline when the number of registers is small, to avoid heap allocations.
+  // If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
+  static constexpr size_t kSmallCount = 16;
+  size_t count_;
+  std::array<DexRegisterLocation, kSmallCount> regs_small_;
+  dchecked_vector<DexRegisterLocation> regs_large_;
 };
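
The new DexRegisterMap keeps small maps in inline storage and only falls back to a heap-backed vector for larger ones. A minimal, self-contained sketch of that pattern (illustrative names and types, not part of the ART headers):

    #include <array>
    #include <cstddef>
    #include <vector>

    // Values live in a fixed-size array while they fit; larger maps spill
    // into a heap-allocated vector. Accessors pick the active storage.
    template <typename T, size_t kSmallCount = 16>
    class SmallBuffer {
     public:
      explicit SmallBuffer(size_t count) : count_(count) {
        if (count_ > kSmallCount) {
          large_.resize(count_);  // Heap allocation only for big maps.
        }
      }
      T* data() { return count_ <= kSmallCount ? small_.data() : large_.data(); }
      const T& operator[](size_t i) const {
        return count_ <= kSmallCount ? small_[i] : large_[i];
      }
      size_t size() const { return count_; }

     private:
      size_t count_;
      std::array<T, kSmallCount> small_{};
      std::vector<T> large_;
    };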
 
 /**
@@ -642,15 +145,16 @@
  * - Knowing the inlining information,
  * - Knowing the values of dex registers.
  */
-class StackMap : public BitTable<6>::Accessor {
+class StackMap : public BitTable<7>::Accessor {
  public:
   enum Field {
     kPackedNativePc,
     kDexPc,
-    kDexRegisterMapOffset,
-    kInlineInfoIndex,
     kRegisterMaskIndex,
     kStackMaskIndex,
+    kInlineInfoIndex,
+    kDexRegisterMaskIndex,
+    kDexRegisterMapIndex,
     kCount,
   };
 
@@ -664,8 +168,10 @@
 
   uint32_t GetDexPc() const { return Get<kDexPc>(); }
 
-  uint32_t GetDexRegisterMapOffset() const { return Get<kDexRegisterMapOffset>(); }
-  bool HasDexRegisterMap() const { return GetDexRegisterMapOffset() != kNoValue; }
+  uint32_t GetDexRegisterMaskIndex() const { return Get<kDexRegisterMaskIndex>(); }
+
+  uint32_t GetDexRegisterMapIndex() const { return Get<kDexRegisterMapIndex>(); }
+  bool HasDexRegisterMap() const { return GetDexRegisterMapIndex() != kNoValue; }
 
   uint32_t GetInlineInfoIndex() const { return Get<kInlineInfoIndex>(); }
   bool HasInlineInfo() const { return GetInlineInfoIndex() != kNoValue; }
@@ -675,7 +181,7 @@
   uint32_t GetStackMaskIndex() const { return Get<kStackMaskIndex>(); }
 
   static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
-    // TODO: DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
+    DCHECK_ALIGNED_PARAM(native_pc, GetInstructionSetInstructionAlignment(isa));
     return native_pc / GetInstructionSetInstructionAlignment(isa);
   }
 
@@ -685,14 +191,12 @@
     return native_pc;
   }
 
-  static void DumpEncoding(const BitTable<6>& table, VariableIndentationOutputStream* vios);
   void Dump(VariableIndentationOutputStream* vios,
             const CodeInfo& code_info,
             const MethodInfo& method_info,
             uint32_t code_offset,
             uint16_t number_of_dex_registers,
-            InstructionSet instruction_set,
-            const std::string& header_suffix = "") const;
+            InstructionSet instruction_set) const;
 };
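
The packing that the now-enforced DCHECK protects is simply division by the instruction alignment, which is ISA-dependent; the stand-alone illustration below assumes an alignment of 4 and is not the ART implementation:

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kInstructionAlignment = 4;  // Assumed; other ISAs differ.

    uint32_t PackNativePc(uint32_t native_pc) {
      assert(native_pc % kInstructionAlignment == 0);  // Mirrors the DCHECK above.
      return native_pc / kInstructionAlignment;
    }

    uint32_t UnpackNativePc(uint32_t packed_native_pc) {
      return packed_native_pc * kInstructionAlignment;
    }

    // PackNativePc(0x40) == 0x10 and UnpackNativePc(0x10) == 0x40.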
 
 /**
@@ -700,14 +204,16 @@
  * The row referenced from the StackMap holds information at depth 0.
  * Following rows hold information for further depths.
  */
-class InlineInfo : public BitTable<5>::Accessor {
+class InlineInfo : public BitTable<7>::Accessor {
  public:
   enum Field {
     kIsLast,  // Determines if there are further rows for further depths.
-    kMethodIndexIdx,  // Method index or ArtMethod high bits.
     kDexPc,
-    kExtraData,  // ArtMethod low bits or 1.
-    kDexRegisterMapOffset,
+    kMethodIndexIdx,
+    kArtMethodHi,  // High bits of ArtMethod*.
+    kArtMethodLo,  // Low bits of ArtMethod*.
+    kDexRegisterMaskIndex,
+    kDexRegisterMapIndex,
     kCount,
   };
   static constexpr uint32_t kLast = -1;
@@ -740,30 +246,26 @@
   }
 
   bool EncodesArtMethodAtDepth(uint32_t depth) const {
-    return (AtDepth(depth).Get<kExtraData>() & 1) == 0;
+    return AtDepth(depth).Get<kArtMethodLo>() != kNoValue;
   }
 
   ArtMethod* GetArtMethodAtDepth(uint32_t depth) const {
-    uint32_t low_bits = AtDepth(depth).Get<kExtraData>();
-    uint32_t high_bits = AtDepth(depth).Get<kMethodIndexIdx>();
-    if (high_bits == 0) {
-      return reinterpret_cast<ArtMethod*>(low_bits);
-    } else {
-      uint64_t address = high_bits;
-      address = address << 32;
-      return reinterpret_cast<ArtMethod*>(address | low_bits);
-    }
+    uint64_t lo = AtDepth(depth).Get<kArtMethodLo>();
+    uint64_t hi = AtDepth(depth).Get<kArtMethodHi>();
+    return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
   }
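
The ArtMethod* is now stored as two explicit 32-bit columns instead of the old bit-stealing scheme; the reassembly above is a plain hi/lo split, sketched here with illustrative helper names:

    #include <cstdint>

    struct SplitPointer { uint32_t hi; uint32_t lo; };

    // Split a 64-bit pointer value into the two 32-bit columns...
    SplitPointer Split(uint64_t ptr) {
      return SplitPointer{static_cast<uint32_t>(ptr >> 32),
                          static_cast<uint32_t>(ptr)};
    }

    // ...and reassemble it the same way GetArtMethodAtDepth() does.
    uint64_t Join(SplitPointer s) {
      return (static_cast<uint64_t>(s.hi) << 32) | s.lo;
    }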
 
-  uint32_t GetDexRegisterMapOffsetAtDepth(uint32_t depth) const {
-    return AtDepth(depth).Get<kDexRegisterMapOffset>();
+  uint32_t GetDexRegisterMaskIndexAtDepth(uint32_t depth) const {
+    return AtDepth(depth).Get<kDexRegisterMaskIndex>();
   }
 
+  uint32_t GetDexRegisterMapIndexAtDepth(uint32_t depth) const {
+    return AtDepth(depth).Get<kDexRegisterMapIndex>();
+  }
   bool HasDexRegisterMapAtDepth(uint32_t depth) const {
-    return GetDexRegisterMapOffsetAtDepth(depth) != StackMap::kNoValue;
+    return GetDexRegisterMapIndexAtDepth(depth) != kNoValue;
   }
 
-  static void DumpEncoding(const BitTable<5>& table, VariableIndentationOutputStream* vios);
   void Dump(VariableIndentationOutputStream* vios,
             const CodeInfo& info,
             const MethodInfo& method_info,
@@ -795,6 +297,40 @@
   }
 };
 
+class DexRegisterInfo : public BitTable<2>::Accessor {
+ public:
+  enum Field {
+    kKind,
+    kPackedValue,
+    kCount,
+  };
+
+  DexRegisterInfo(const BitTable<kCount>* table, uint32_t row)
+    : BitTable<kCount>::Accessor(table, row) {}
+
+  ALWAYS_INLINE DexRegisterLocation GetLocation() const {
+    DexRegisterLocation::Kind kind = static_cast<DexRegisterLocation::Kind>(Get<kKind>());
+    return DexRegisterLocation(kind, UnpackValue(kind, Get<kPackedValue>()));
+  }
+
+  static uint32_t PackValue(DexRegisterLocation::Kind kind, uint32_t value) {
+    uint32_t packed_value = value;
+    if (kind == DexRegisterLocation::Kind::kInStack) {
+      DCHECK(IsAligned<kFrameSlotSize>(packed_value));
+      packed_value /= kFrameSlotSize;
+    }
+    return packed_value;
+  }
+
+  static uint32_t UnpackValue(DexRegisterLocation::Kind kind, uint32_t packed_value) {
+    uint32_t value = packed_value;
+    if (kind == DexRegisterLocation::Kind::kInStack) {
+      value *= kFrameSlotSize;
+    }
+    return value;
+  }
+};
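
PackValue/UnpackValue only compress in-stack locations, whose byte offsets are always multiples of the frame slot size; a small worked round trip (kFrameSlotSize assumed to be 4 bytes, helper names illustrative):

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kFrameSlotSize = 4;

    // Stack offsets are stored in slot units so the packed value needs fewer bits.
    uint32_t PackInStackValue(uint32_t byte_offset) {
      assert(byte_offset % kFrameSlotSize == 0);
      return byte_offset / kFrameSlotSize;
    }

    uint32_t UnpackInStackValue(uint32_t packed_value) {
      return packed_value * kFrameSlotSize;
    }

    // PackInStackValue(128) == 32; UnpackInStackValue(32) == 128.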
+
 // Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
 // therefore it is worth encoding the mask as value+shift.
 class RegisterMask : public BitTable<2>::Accessor {
@@ -815,11 +351,7 @@
 
 /**
  * Wrapper around all compiler information collected for a method.
- * The information is of the form:
- *
- *   [BitTable<Header>, BitTable<StackMap>, BitTable<RegisterMask>, BitTable<InlineInfo>,
- *    BitTable<InvokeInfo>, BitTable<StackMask>, DexRegisterMap, DexLocationCatalog]
- *
+ * See the Decode method at the end for the precise binary format.
  */
 class CodeInfo {
  public:
@@ -840,11 +372,7 @@
   }
 
   bool HasInlineInfo() const {
-    return stack_maps_.NumColumnBits(StackMap::kInlineInfoIndex) != 0;
-  }
-
-  DexRegisterLocationCatalog GetDexRegisterLocationCatalog() const {
-    return DexRegisterLocationCatalog(location_catalog_);
+    return inline_infos_.NumRows() > 0;
   }
 
   ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
@@ -866,11 +394,11 @@
   }
 
   uint32_t GetNumberOfLocationCatalogEntries() const {
-    return location_catalog_entries_;
+    return dex_register_catalog_.NumRows();
   }
 
-  uint32_t GetDexRegisterLocationCatalogSize() const {
-    return location_catalog_.size();
+  ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
+    return DexRegisterInfo(&dex_register_catalog_, index).GetLocation();
   }
 
   uint32_t GetNumberOfStackMaps() const {
@@ -881,41 +409,19 @@
     return InvokeInfo(&invoke_infos_, index);
   }
 
-  DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
-                                     size_t number_of_dex_registers) const {
-    if (!stack_map.HasDexRegisterMap()) {
-      return DexRegisterMap(MemoryRegion(), 0, *this);
-    }
-    const uint32_t offset = stack_map.GetDexRegisterMapOffset();
-    size_t size = ComputeDexRegisterMapSizeOf(offset, number_of_dex_registers);
-    return DexRegisterMap(dex_register_maps_.Subregion(offset, size),
-                          number_of_dex_registers,
-                          *this);
+  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
+                                                   size_t num_dex_registers) const {
+    return DecodeDexRegisterMap(stack_map.GetDexRegisterMaskIndex(),
+                                stack_map.GetDexRegisterMapIndex(),
+                                num_dex_registers);
   }
 
-  size_t GetDexRegisterMapsSize(uint32_t number_of_dex_registers) const {
-    size_t total = 0;
-    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
-      StackMap stack_map = GetStackMapAt(i);
-      DexRegisterMap map(GetDexRegisterMapOf(stack_map, number_of_dex_registers));
-      total += map.Size();
-    }
-    return total;
-  }
-
-  // Return the `DexRegisterMap` pointed by `inline_info` at depth `depth`.
-  DexRegisterMap GetDexRegisterMapAtDepth(uint8_t depth,
-                                          InlineInfo inline_info,
-                                          uint32_t number_of_dex_registers) const {
-    if (!inline_info.HasDexRegisterMapAtDepth(depth)) {
-      return DexRegisterMap(MemoryRegion(), 0, *this);
-    } else {
-      uint32_t offset = inline_info.GetDexRegisterMapOffsetAtDepth(depth);
-      size_t size = ComputeDexRegisterMapSizeOf(offset, number_of_dex_registers);
-      return DexRegisterMap(dex_register_maps_.Subregion(offset, size),
-                            number_of_dex_registers,
-                            *this);
-    }
+  ALWAYS_INLINE DexRegisterMap GetDexRegisterMapAtDepth(uint8_t depth,
+                                                        InlineInfo inline_info,
+                                                        size_t num_dex_registers) const {
+    return DecodeDexRegisterMap(inline_info.GetDexRegisterMaskIndexAtDepth(depth),
+                                inline_info.GetDexRegisterMapIndexAtDepth(depth),
+                                num_dex_registers);
   }
 
   InlineInfo GetInlineInfo(size_t index) const {
@@ -965,8 +471,8 @@
         if (other.GetDexPc() == dex_pc &&
             other.GetNativePcOffset(kRuntimeISA) ==
                 stack_map.GetNativePcOffset(kRuntimeISA)) {
-          DCHECK_EQ(other.GetDexRegisterMapOffset(),
-                    stack_map.GetDexRegisterMapOffset());
+          DCHECK_EQ(other.GetDexRegisterMapIndex(),
+                    stack_map.GetDexRegisterMapIndex());
           DCHECK(!stack_map.HasInlineInfo());
           if (i < e - 2) {
             // Make sure there are not three identical stack maps following each other.
@@ -1004,81 +510,61 @@
     return InvokeInfo(&invoke_infos_, -1);
   }
 
-  // Dump this CodeInfo object on `os`.  `code_offset` is the (absolute)
-  // native PC of the compiled method and `number_of_dex_registers` the
-  // number of Dex virtual registers used in this method.  If
-  // `dump_stack_maps` is true, also dump the stack maps and the
-  // associated Dex register maps.
+  // Dump this CodeInfo object on `vios`.
+  // `code_offset` is the (absolute) native PC of the compiled method.
   void Dump(VariableIndentationOutputStream* vios,
             uint32_t code_offset,
             uint16_t number_of_dex_registers,
-            bool dump_stack_maps,
+            bool verbose,
             InstructionSet instruction_set,
             const MethodInfo& method_info) const;
 
  private:
-  // Compute the size of the Dex register map associated to the stack map at
-  // `dex_register_map_offset_in_code_info`.
-  size_t ComputeDexRegisterMapSizeOf(uint32_t dex_register_map_offset,
-                                     uint16_t number_of_dex_registers) const {
-    // Offset where the actual mapping data starts within art::DexRegisterMap.
-    size_t location_mapping_data_offset_in_dex_register_map =
-        DexRegisterMap::GetLocationMappingDataOffset(number_of_dex_registers);
-    // Create a temporary art::DexRegisterMap to be able to call
-    // art::DexRegisterMap::GetNumberOfLiveDexRegisters and
-    DexRegisterMap dex_register_map_without_locations(
-        MemoryRegion(dex_register_maps_.Subregion(dex_register_map_offset,
-                                        location_mapping_data_offset_in_dex_register_map)),
-        number_of_dex_registers,
-        *this);
-    size_t number_of_live_dex_registers =
-        dex_register_map_without_locations.GetNumberOfLiveDexRegisters();
-    size_t location_mapping_data_size_in_bits =
-        DexRegisterMap::SingleEntrySizeInBits(GetNumberOfLocationCatalogEntries())
-        * number_of_live_dex_registers;
-    size_t location_mapping_data_size_in_bytes =
-        RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
-    size_t dex_register_map_size =
-        location_mapping_data_offset_in_dex_register_map + location_mapping_data_size_in_bytes;
-    return dex_register_map_size;
-  }
-
-  MemoryRegion DecodeMemoryRegion(MemoryRegion& region, size_t* bit_offset) {
-    size_t length = DecodeVarintBits(BitMemoryRegion(region), bit_offset);
-    size_t offset = BitsToBytesRoundUp(*bit_offset);;
-    *bit_offset = (offset + length) * kBitsPerByte;
-    return region.Subregion(offset, length);
+  ALWAYS_INLINE DexRegisterMap DecodeDexRegisterMap(uint32_t mask_index,
+                                                    uint32_t map_index,
+                                                    uint32_t num_dex_registers) const {
+    DexRegisterMap map(map_index == StackMap::kNoValue ? 0 : num_dex_registers);
+    if (mask_index != StackMap::kNoValue) {
+      BitMemoryRegion mask = dex_register_masks_.GetBitMemoryRegion(mask_index);
+      num_dex_registers = std::min<uint32_t>(num_dex_registers, mask.size_in_bits());
+      DexRegisterLocation* regs = map.data();
+      for (uint32_t r = 0; r < mask.size_in_bits(); r++) {
+        if (mask.LoadBit(r) /* is_live */) {
+          DCHECK_LT(r, map.size());
+          regs[r] = GetDexRegisterCatalogEntry(dex_register_maps_.Get(map_index++));
+        }
+      }
+    }
+    return map;
   }
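
DecodeDexRegisterMap walks the live-register bit mask and consumes one catalog index from the map table per live register. A self-contained sketch of that indirection, with plain containers standing in for the BitTables:

    #include <cstdint>
    #include <vector>

    struct Location { int kind; int value; };  // Stand-in for DexRegisterLocation.

    std::vector<Location> DecodeRegisters(const std::vector<bool>& live_mask,
                                          const std::vector<uint32_t>& map_indices,
                                          const std::vector<Location>& catalog,
                                          size_t num_dex_registers) {
      std::vector<Location> regs(num_dex_registers, Location{/*kNone=*/ -1, 0});
      size_t next = 0;  // Advances once per live register, like map_index++ above.
      for (size_t r = 0; r < live_mask.size() && r < num_dex_registers; ++r) {
        if (live_mask[r]) {
          regs[r] = catalog[map_indices[next++]];
        }
      }
      return regs;
    }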
 
   void Decode(const uint8_t* data) {
     size_t non_header_size = DecodeUnsignedLeb128(&data);
-    MemoryRegion region(const_cast<uint8_t*>(data), non_header_size);
-    BitMemoryRegion bit_region(region);
+    BitMemoryRegion region(MemoryRegion(const_cast<uint8_t*>(data), non_header_size));
     size_t bit_offset = 0;
     size_ = UnsignedLeb128Size(non_header_size) + non_header_size;
-    dex_register_maps_ = DecodeMemoryRegion(region, &bit_offset);
-    location_catalog_entries_ = DecodeVarintBits(bit_region, &bit_offset);
-    location_catalog_ = DecodeMemoryRegion(region, &bit_offset);
-    stack_maps_.Decode(bit_region, &bit_offset);
-    invoke_infos_.Decode(bit_region, &bit_offset);
-    inline_infos_.Decode(bit_region, &bit_offset);
-    register_masks_.Decode(bit_region, &bit_offset);
-    stack_masks_.Decode(bit_region, &bit_offset);
-    CHECK_EQ(BitsToBytesRoundUp(bit_offset), non_header_size);
+    stack_maps_.Decode(region, &bit_offset);
+    register_masks_.Decode(region, &bit_offset);
+    stack_masks_.Decode(region, &bit_offset);
+    invoke_infos_.Decode(region, &bit_offset);
+    inline_infos_.Decode(region, &bit_offset);
+    dex_register_masks_.Decode(region, &bit_offset);
+    dex_register_maps_.Decode(region, &bit_offset);
+    dex_register_catalog_.Decode(region, &bit_offset);
+    CHECK_EQ(non_header_size, BitsToBytesRoundUp(bit_offset)) << "Invalid CodeInfo";
   }
 
   size_t size_;
-  MemoryRegion dex_register_maps_;
-  uint32_t location_catalog_entries_;
-  MemoryRegion location_catalog_;
   BitTable<StackMap::Field::kCount> stack_maps_;
-  BitTable<InvokeInfo::Field::kCount> invoke_infos_;
-  BitTable<InlineInfo::Field::kCount> inline_infos_;
   BitTable<RegisterMask::Field::kCount> register_masks_;
   BitTable<1> stack_masks_;
+  BitTable<InvokeInfo::Field::kCount> invoke_infos_;
+  BitTable<InlineInfo::Field::kCount> inline_infos_;
+  BitTable<1> dex_register_masks_;
+  BitTable<1> dex_register_maps_;
+  BitTable<DexRegisterInfo::Field::kCount> dex_register_catalog_;
 
   friend class OatDumper;
-  friend class StackMapStream;
 };
 
 #undef ELEMENT_BYTE_OFFSET_AFTER
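
CodeInfo::Decode above reads a ULEB128 size prefix and then decodes the eight bit-packed tables back to back from one shared bit offset. The prefix uses the standard unsigned LEB128 scheme; an illustrative decoder is shown below (ART ships its own helper in base/leb128.h):

    #include <cstdint>

    // Standard unsigned LEB128: 7 payload bits per byte, MSB set while more
    // bytes follow. Advances *data past the encoded value.
    uint32_t DecodeUleb128(const uint8_t** data) {
      uint32_t result = 0;
      int shift = 0;
      uint8_t byte;
      do {
        byte = *(*data)++;
        result |= static_cast<uint32_t>(byte & 0x7f) << shift;
        shift += 7;
      } while ((byte & 0x80) != 0);
      return result;
    }

    // Example: the bytes {0xe5, 0x8e, 0x26} decode to 624485.
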
diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc
index f7cdf39..c64e7bb 100644
--- a/runtime/well_known_classes.cc
+++ b/runtime/well_known_classes.cc
@@ -55,8 +55,6 @@
 jclass WellKnownClasses::java_lang_ClassNotFoundException;
 jclass WellKnownClasses::java_lang_Daemons;
 jclass WellKnownClasses::java_lang_Error;
-jclass WellKnownClasses::java_lang_invoke_MethodHandle;
-jclass WellKnownClasses::java_lang_invoke_VarHandle;
 jclass WellKnownClasses::java_lang_IllegalAccessError;
 jclass WellKnownClasses::java_lang_NoClassDefFoundError;
 jclass WellKnownClasses::java_lang_Object;
@@ -74,7 +72,6 @@
 jclass WellKnownClasses::java_lang_Throwable;
 jclass WellKnownClasses::java_nio_ByteBuffer;
 jclass WellKnownClasses::java_nio_DirectByteBuffer;
-jclass WellKnownClasses::java_util_ArrayList;
 jclass WellKnownClasses::java_util_Collections;
 jclass WellKnownClasses::java_util_function_Consumer;
 jclass WellKnownClasses::libcore_reflect_AnnotationFactory;
@@ -90,14 +87,11 @@
 jmethodID WellKnownClasses::java_lang_Character_valueOf;
 jmethodID WellKnownClasses::java_lang_ClassLoader_loadClass;
 jmethodID WellKnownClasses::java_lang_ClassNotFoundException_init;
-jmethodID WellKnownClasses::java_lang_Daemons_requestHeapTrim;
 jmethodID WellKnownClasses::java_lang_Daemons_start;
 jmethodID WellKnownClasses::java_lang_Daemons_stop;
 jmethodID WellKnownClasses::java_lang_Double_valueOf;
 jmethodID WellKnownClasses::java_lang_Float_valueOf;
 jmethodID WellKnownClasses::java_lang_Integer_valueOf;
-jmethodID WellKnownClasses::java_lang_invoke_MethodHandle_invoke;
-jmethodID WellKnownClasses::java_lang_invoke_MethodHandle_invokeExact;
 jmethodID WellKnownClasses::java_lang_invoke_MethodHandles_lookup;
 jmethodID WellKnownClasses::java_lang_invoke_MethodHandles_Lookup_findConstructor;
 jmethodID WellKnownClasses::java_lang_Long_valueOf;
@@ -108,7 +102,6 @@
 jmethodID WellKnownClasses::java_lang_Runtime_nativeLoad;
 jmethodID WellKnownClasses::java_lang_Short_valueOf;
 jmethodID WellKnownClasses::java_lang_String_charAt;
-jmethodID WellKnownClasses::java_lang_System_runFinalization = nullptr;
 jmethodID WellKnownClasses::java_lang_Thread_dispatchUncaughtException;
 jmethodID WellKnownClasses::java_lang_Thread_init;
 jmethodID WellKnownClasses::java_lang_Thread_run;
@@ -144,7 +137,6 @@
 jfieldID WellKnownClasses::java_lang_Throwable_stackTrace;
 jfieldID WellKnownClasses::java_lang_Throwable_stackState;
 jfieldID WellKnownClasses::java_lang_Throwable_suppressedExceptions;
-jfieldID WellKnownClasses::java_lang_reflect_Proxy_h;
 jfieldID WellKnownClasses::java_nio_ByteBuffer_address;
 jfieldID WellKnownClasses::java_nio_ByteBuffer_hb;
 jfieldID WellKnownClasses::java_nio_ByteBuffer_isReadOnly;
@@ -152,8 +144,6 @@
 jfieldID WellKnownClasses::java_nio_ByteBuffer_offset;
 jfieldID WellKnownClasses::java_nio_DirectByteBuffer_capacity;
 jfieldID WellKnownClasses::java_nio_DirectByteBuffer_effectiveDirectAddress;
-jfieldID WellKnownClasses::java_util_ArrayList_array;
-jfieldID WellKnownClasses::java_util_ArrayList_size;
 jfieldID WellKnownClasses::java_util_Collections_EMPTY_LIST;
 jfieldID WellKnownClasses::libcore_util_EmptyArray_STACK_TRACE_ELEMENT;
 jfieldID WellKnownClasses::org_apache_harmony_dalvik_ddmc_Chunk_data;
@@ -323,8 +313,6 @@
   java_lang_OutOfMemoryError = CacheClass(env, "java/lang/OutOfMemoryError");
   java_lang_Error = CacheClass(env, "java/lang/Error");
   java_lang_IllegalAccessError = CacheClass(env, "java/lang/IllegalAccessError");
-  java_lang_invoke_MethodHandle = CacheClass(env, "java/lang/invoke/MethodHandle");
-  java_lang_invoke_VarHandle = CacheClass(env, "java/lang/invoke/VarHandle");
   java_lang_NoClassDefFoundError = CacheClass(env, "java/lang/NoClassDefFoundError");
   java_lang_reflect_Parameter = CacheClass(env, "java/lang/reflect/Parameter");
   java_lang_reflect_Parameter__array = CacheClass(env, "[Ljava/lang/reflect/Parameter;");
@@ -339,7 +327,6 @@
   java_lang_Throwable = CacheClass(env, "java/lang/Throwable");
   java_nio_ByteBuffer = CacheClass(env, "java/nio/ByteBuffer");
   java_nio_DirectByteBuffer = CacheClass(env, "java/nio/DirectByteBuffer");
-  java_util_ArrayList = CacheClass(env, "java/util/ArrayList");
   java_util_Collections = CacheClass(env, "java/util/Collections");
   java_util_function_Consumer = CacheClass(env, "java/util/function/Consumer");
   libcore_reflect_AnnotationFactory = CacheClass(env, "libcore/reflect/AnnotationFactory");
@@ -353,11 +340,8 @@
   java_lang_ClassNotFoundException_init = CacheMethod(env, java_lang_ClassNotFoundException, false, "<init>", "(Ljava/lang/String;Ljava/lang/Throwable;)V");
   java_lang_ClassLoader_loadClass = CacheMethod(env, java_lang_ClassLoader, false, "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;");
 
-  java_lang_Daemons_requestHeapTrim = CacheMethod(env, java_lang_Daemons, true, "requestHeapTrim", "()V");
   java_lang_Daemons_start = CacheMethod(env, java_lang_Daemons, true, "start", "()V");
   java_lang_Daemons_stop = CacheMethod(env, java_lang_Daemons, true, "stop", "()V");
-  java_lang_invoke_MethodHandle_invoke = CacheMethod(env, java_lang_invoke_MethodHandle, false, "invoke", "([Ljava/lang/Object;)Ljava/lang/Object;");
-  java_lang_invoke_MethodHandle_invokeExact = CacheMethod(env, java_lang_invoke_MethodHandle, false, "invokeExact", "([Ljava/lang/Object;)Ljava/lang/Object;");
   java_lang_invoke_MethodHandles_lookup = CacheMethod(env, "java/lang/invoke/MethodHandles", true, "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;");
   java_lang_invoke_MethodHandles_Lookup_findConstructor = CacheMethod(env, "java/lang/invoke/MethodHandles$Lookup", false, "findConstructor", "(Ljava/lang/Class;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;");
 
@@ -408,8 +392,6 @@
   java_nio_ByteBuffer_offset = CacheField(env, java_nio_ByteBuffer, false, "offset", "I");
   java_nio_DirectByteBuffer_capacity = CacheField(env, java_nio_DirectByteBuffer, false, "capacity", "I");
   java_nio_DirectByteBuffer_effectiveDirectAddress = CacheField(env, java_nio_DirectByteBuffer, false, "address", "J");
-  java_util_ArrayList_array = CacheField(env, java_util_ArrayList, false, "elementData", "[Ljava/lang/Object;");
-  java_util_ArrayList_size = CacheField(env, java_util_ArrayList, false, "size", "I");
   java_util_Collections_EMPTY_LIST = CacheField(env, java_util_Collections, true, "EMPTY_LIST", "Ljava/util/List;");
   libcore_util_EmptyArray_STACK_TRACE_ELEMENT = CacheField(env, libcore_util_EmptyArray, true, "STACK_TRACE_ELEMENT", "[Ljava/lang/StackTraceElement;");
   org_apache_harmony_dalvik_ddmc_Chunk_data = CacheField(env, org_apache_harmony_dalvik_ddmc_Chunk, false, "data", "[B");
@@ -440,9 +422,6 @@
     CacheMethod(env, java_lang_reflect_Proxy, true, "invoke",
                 "(Ljava/lang/reflect/Proxy;Ljava/lang/reflect/Method;"
                     "[Ljava/lang/Object;)Ljava/lang/Object;");
-  java_lang_reflect_Proxy_h =
-    CacheField(env, java_lang_reflect_Proxy, false, "h",
-               "Ljava/lang/reflect/InvocationHandler;");
 }
 
 void WellKnownClasses::Clear() {
@@ -464,8 +443,6 @@
   java_lang_Daemons = nullptr;
   java_lang_Error = nullptr;
   java_lang_IllegalAccessError = nullptr;
-  java_lang_invoke_MethodHandle = nullptr;
-  java_lang_invoke_VarHandle = nullptr;
   java_lang_NoClassDefFoundError = nullptr;
   java_lang_Object = nullptr;
   java_lang_OutOfMemoryError = nullptr;
@@ -480,7 +457,6 @@
   java_lang_Thread = nullptr;
   java_lang_ThreadGroup = nullptr;
   java_lang_Throwable = nullptr;
-  java_util_ArrayList = nullptr;
   java_util_Collections = nullptr;
   java_nio_ByteBuffer = nullptr;
   java_nio_DirectByteBuffer = nullptr;
@@ -497,14 +473,11 @@
   java_lang_Character_valueOf = nullptr;
   java_lang_ClassLoader_loadClass = nullptr;
   java_lang_ClassNotFoundException_init = nullptr;
-  java_lang_Daemons_requestHeapTrim = nullptr;
   java_lang_Daemons_start = nullptr;
   java_lang_Daemons_stop = nullptr;
   java_lang_Double_valueOf = nullptr;
   java_lang_Float_valueOf = nullptr;
   java_lang_Integer_valueOf = nullptr;
-  java_lang_invoke_MethodHandle_invoke = nullptr;
-  java_lang_invoke_MethodHandle_invokeExact = nullptr;
   java_lang_invoke_MethodHandles_lookup = nullptr;
   java_lang_invoke_MethodHandles_Lookup_findConstructor = nullptr;
   java_lang_Long_valueOf = nullptr;
@@ -515,7 +488,6 @@
   java_lang_Runtime_nativeLoad = nullptr;
   java_lang_Short_valueOf = nullptr;
   java_lang_String_charAt = nullptr;
-  java_lang_System_runFinalization = nullptr;
   java_lang_Thread_dispatchUncaughtException = nullptr;
   java_lang_Thread_init = nullptr;
   java_lang_Thread_run = nullptr;
@@ -533,7 +505,6 @@
   dalvik_system_DexPathList_dexElements = nullptr;
   dalvik_system_DexPathList__Element_dexFile = nullptr;
   dalvik_system_VMRuntime_nonSdkApiUsageConsumer = nullptr;
-  java_lang_reflect_Proxy_h = nullptr;
   java_lang_Thread_daemon = nullptr;
   java_lang_Thread_group = nullptr;
   java_lang_Thread_lock = nullptr;
@@ -558,8 +529,6 @@
   java_nio_ByteBuffer_offset = nullptr;
   java_nio_DirectByteBuffer_capacity = nullptr;
   java_nio_DirectByteBuffer_effectiveDirectAddress = nullptr;
-  java_util_ArrayList_array = nullptr;
-  java_util_ArrayList_size = nullptr;
   java_util_Collections_EMPTY_LIST = nullptr;
   libcore_util_EmptyArray_STACK_TRACE_ELEMENT = nullptr;
   org_apache_harmony_dalvik_ddmc_Chunk_data = nullptr;
diff --git a/runtime/well_known_classes.h b/runtime/well_known_classes.h
index c06e4a7..c81062f 100644
--- a/runtime/well_known_classes.h
+++ b/runtime/well_known_classes.h
@@ -66,8 +66,6 @@
   static jclass java_lang_Daemons;
   static jclass java_lang_Error;
   static jclass java_lang_IllegalAccessError;
-  static jclass java_lang_invoke_MethodHandle;
-  static jclass java_lang_invoke_VarHandle;
   static jclass java_lang_NoClassDefFoundError;
   static jclass java_lang_Object;
   static jclass java_lang_OutOfMemoryError;
@@ -82,7 +80,6 @@
   static jclass java_lang_Thread;
   static jclass java_lang_ThreadGroup;
   static jclass java_lang_Throwable;
-  static jclass java_util_ArrayList;
   static jclass java_util_Collections;
   static jclass java_util_function_Consumer;
   static jclass java_nio_ByteBuffer;
@@ -100,14 +97,11 @@
   static jmethodID java_lang_Character_valueOf;
   static jmethodID java_lang_ClassLoader_loadClass;
   static jmethodID java_lang_ClassNotFoundException_init;
-  static jmethodID java_lang_Daemons_requestHeapTrim;
   static jmethodID java_lang_Daemons_start;
   static jmethodID java_lang_Daemons_stop;
   static jmethodID java_lang_Double_valueOf;
   static jmethodID java_lang_Float_valueOf;
   static jmethodID java_lang_Integer_valueOf;
-  static jmethodID java_lang_invoke_MethodHandle_invoke;
-  static jmethodID java_lang_invoke_MethodHandle_invokeExact;
   static jmethodID java_lang_invoke_MethodHandles_lookup;
   static jmethodID java_lang_invoke_MethodHandles_Lookup_findConstructor;
   static jmethodID java_lang_Long_valueOf;
@@ -118,7 +112,6 @@
   static jmethodID java_lang_Runtime_nativeLoad;
   static jmethodID java_lang_Short_valueOf;
   static jmethodID java_lang_String_charAt;
-  static jmethodID java_lang_System_runFinalization;
   static jmethodID java_lang_Thread_dispatchUncaughtException;
   static jmethodID java_lang_Thread_init;
   static jmethodID java_lang_Thread_run;
@@ -137,7 +130,6 @@
   static jfieldID dalvik_system_DexPathList_dexElements;
   static jfieldID dalvik_system_DexPathList__Element_dexFile;
   static jfieldID dalvik_system_VMRuntime_nonSdkApiUsageConsumer;
-  static jfieldID java_lang_reflect_Proxy_h;
   static jfieldID java_lang_Thread_daemon;
   static jfieldID java_lang_Thread_group;
   static jfieldID java_lang_Thread_lock;
@@ -163,8 +155,6 @@
   static jfieldID java_nio_DirectByteBuffer_capacity;
   static jfieldID java_nio_DirectByteBuffer_effectiveDirectAddress;
 
-  static jfieldID java_util_ArrayList_array;
-  static jfieldID java_util_ArrayList_size;
   static jfieldID java_util_Collections_EMPTY_LIST;
   static jfieldID libcore_util_EmptyArray_STACK_TRACE_ELEMENT;
   static jfieldID org_apache_harmony_dalvik_ddmc_Chunk_data;
diff --git a/test/530-checker-lse/build b/test/530-checker-lse/build
deleted file mode 100755
index 10ffcc5..0000000
--- a/test/530-checker-lse/build
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See b/65168732
-export USE_D8=false
-
-./default-build "$@"
diff --git a/test/530-checker-lse/smali/Main.smali b/test/530-checker-lse/smali/Main.smali
new file mode 100644
index 0000000..2678017
--- /dev/null
+++ b/test/530-checker-lse/smali/Main.smali
@@ -0,0 +1,260 @@
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.class public LMain2;
+.super Ljava/lang/Object;
+.source "Main.java"
+
+# direct methods
+
+## CHECK-START: int Main2.test4(TestClass, boolean) load_store_elimination (before)
+## CHECK: InstanceFieldSet
+## CHECK: InstanceFieldGet
+## CHECK: Return
+## CHECK: InstanceFieldSet
+
+## CHECK-START: int Main2.test4(TestClass, boolean) load_store_elimination (after)
+## CHECK: InstanceFieldSet
+## CHECK-NOT: NullCheck
+## CHECK-NOT: InstanceFieldGet
+## CHECK: Return
+## CHECK: InstanceFieldSet
+
+# Set and merge the same value in two branches.
+
+# Original java source:
+#
+#     static int test4(TestClass obj, boolean b) {
+#       if (b) {
+#         obj.i = 1;
+#       } else {
+#         obj.i = 1;
+#       }
+#       return obj.i;
+#     }
+
+.method public static test4(LTestClass;Z)I
+    .registers 3
+    .param p0, "obj"    # LTestClass;
+    .param p1, "b"    # Z
+
+    .prologue
+    const/4 v0, 0x1
+
+    .line 185
+    if-eqz p1, :cond_8
+
+    .line 186
+    iput v0, p0, LTestClass;->i:I
+
+    .line 190
+    :goto_5
+    iget v0, p0, LTestClass;->i:I
+
+    return v0
+
+    .line 188
+    :cond_8
+    iput v0, p0, LTestClass;->i:I
+
+    goto :goto_5
+.end method
+
+## CHECK-START: int Main2.test5(TestClass, boolean) load_store_elimination (before)
+## CHECK: InstanceFieldSet
+## CHECK: InstanceFieldGet
+## CHECK: Return
+## CHECK: InstanceFieldSet
+
+## CHECK-START: int Main2.test5(TestClass, boolean) load_store_elimination (after)
+## CHECK: InstanceFieldSet
+## CHECK: InstanceFieldGet
+## CHECK: Return
+## CHECK: InstanceFieldSet
+
+# Set and merge different values in two branches.
+# Original java source:
+#
+#     static int test5(TestClass obj, boolean b) {
+#       if (b) {
+#         obj.i = 1;
+#       } else {
+#         obj.i = 2;
+#       }
+#       return obj.i;
+#     }
+
+.method public static test5(LTestClass;Z)I
+    .registers 3
+    .param p0, "obj"    # LTestClass;
+    .param p1, "b"    # Z
+
+    .prologue
+    .line 207
+    if-eqz p1, :cond_8
+
+    .line 208
+    const/4 v0, 0x1
+
+    iput v0, p0, LTestClass;->i:I
+
+    .line 212
+    :goto_5
+    iget v0, p0, LTestClass;->i:I
+
+    return v0
+
+    .line 210
+    :cond_8
+    const/4 v0, 0x2
+
+    iput v0, p0, LTestClass;->i:I
+
+    goto :goto_5
+.end method
+
+## CHECK-START: int Main2.test23(boolean) load_store_elimination (before)
+## CHECK: NewInstance
+## CHECK: InstanceFieldSet
+## CHECK: InstanceFieldGet
+## CHECK: InstanceFieldSet
+## CHECK: InstanceFieldGet
+## CHECK: Return
+## CHECK: InstanceFieldGet
+## CHECK: InstanceFieldSet
+
+## CHECK-START: int Main2.test23(boolean) load_store_elimination (after)
+## CHECK: NewInstance
+## CHECK-NOT: InstanceFieldSet
+## CHECK-NOT: InstanceFieldGet
+## CHECK: InstanceFieldSet
+## CHECK: InstanceFieldGet
+## CHECK: Return
+## CHECK-NOT: InstanceFieldGet
+## CHECK: InstanceFieldSet
+
+# Test store elimination on merging.
+
+# Original java source:
+#
+#     static int test23(boolean b) {
+#       TestClass obj = new TestClass();
+#       obj.i = 3;      // This store can be eliminated since the value flows into each branch.
+#       if (b) {
+#         obj.i += 1;   // This store cannot be eliminated due to the merge later.
+#       } else {
+#         obj.i += 2;   // This store cannot be eliminated due to the merge later.
+#       }
+#       return obj.i;
+#     }
+
+.method public static test23(Z)I
+    .registers 3
+    .param p0, "b"    # Z
+
+    .prologue
+    .line 582
+    new-instance v0, LTestClass;
+
+    invoke-direct {v0}, LTestClass;-><init>()V
+
+    .line 583
+    .local v0, "obj":LTestClass;
+    const/4 v1, 0x3
+
+    iput v1, v0, LTestClass;->i:I
+
+    .line 584
+    if-eqz p0, :cond_13
+
+    .line 585
+    iget v1, v0, LTestClass;->i:I
+
+    add-int/lit8 v1, v1, 0x1
+
+    iput v1, v0, LTestClass;->i:I
+
+    .line 589
+    :goto_10
+    iget v1, v0, LTestClass;->i:I
+
+    return v1
+
+    .line 587
+    :cond_13
+    iget v1, v0, LTestClass;->i:I
+
+    add-int/lit8 v1, v1, 0x2
+
+    iput v1, v0, LTestClass;->i:I
+
+    goto :goto_10
+.end method
+
+## CHECK-START: float Main2.test24() load_store_elimination (before)
+## CHECK-DAG:     <<True:i\d+>>     IntConstant 1
+## CHECK-DAG:     <<Float8:f\d+>>   FloatConstant 8
+## CHECK-DAG:     <<Float42:f\d+>>  FloatConstant 42
+## CHECK-DAG:     <<Obj:l\d+>>      NewInstance
+## CHECK-DAG:                       InstanceFieldSet [<<Obj>>,<<True>>]
+## CHECK-DAG:                       InstanceFieldSet [<<Obj>>,<<Float8>>]
+## CHECK-DAG:     <<GetTest:z\d+>>  InstanceFieldGet [<<Obj>>]
+## CHECK-DAG:     <<GetField:f\d+>> InstanceFieldGet [<<Obj>>]
+## CHECK-DAG:     <<Select:f\d+>>   Select [<<Float42>>,<<GetField>>,<<GetTest>>]
+## CHECK-DAG:                       Return [<<Select>>]
+
+## CHECK-START: float Main2.test24() load_store_elimination (after)
+## CHECK-DAG:     <<True:i\d+>>     IntConstant 1
+## CHECK-DAG:     <<Float8:f\d+>>   FloatConstant 8
+## CHECK-DAG:     <<Float42:f\d+>>  FloatConstant 42
+## CHECK-DAG:     <<Select:f\d+>>   Select [<<Float42>>,<<Float8>>,<<True>>]
+## CHECK-DAG:                       Return [<<Select>>]
+
+# Original java source:
+#
+#     static float test24() {
+#       float a = 42.0f;
+#       TestClass3 obj = new TestClass3();
+#       if (obj.test1) {
+#         a = obj.floatField;
+#       }
+#       return a;
+#     }
+
+.method public static test24()F
+    .registers 3
+
+    .prologue
+    .line 612
+    const/high16 v0, 0x42280000    # 42.0f
+
+    .line 613
+    .local v0, "a":F
+    new-instance v1, LTestClass3;
+
+    invoke-direct {v1}, LTestClass3;-><init>()V
+
+    .line 614
+    .local v1, "obj":LTestClass3;
+    iget-boolean v2, v1, LTestClass3;->test1:Z
+
+    if-eqz v2, :cond_d
+
+    .line 615
+    iget v0, v1, LTestClass3;->floatField:F
+
+    .line 617
+    :cond_d
+    return v0
+.end method
diff --git a/test/530-checker-lse/src/Main.java b/test/530-checker-lse/src/Main.java
index 93c1538..bd1744c 100644
--- a/test/530-checker-lse/src/Main.java
+++ b/test/530-checker-lse/src/Main.java
@@ -14,6 +14,8 @@
  * limitations under the License.
  */
 
+import java.lang.reflect.Method;
+
 class Circle {
   Circle(double radius) {
     this.radius = radius;
@@ -167,51 +169,6 @@
     return obj.i + obj1.j + obj2.i + obj2.j;
   }
 
-  /// CHECK-START: int Main.test4(TestClass, boolean) load_store_elimination (before)
-  /// CHECK: InstanceFieldSet
-  /// CHECK: InstanceFieldGet
-  /// CHECK: Return
-  /// CHECK: InstanceFieldSet
-
-  /// CHECK-START: int Main.test4(TestClass, boolean) load_store_elimination (after)
-  /// CHECK: InstanceFieldSet
-  /// CHECK-NOT: NullCheck
-  /// CHECK-NOT: InstanceFieldGet
-  /// CHECK: Return
-  /// CHECK: InstanceFieldSet
-
-  // Set and merge the same value in two branches.
-  static int test4(TestClass obj, boolean b) {
-    if (b) {
-      obj.i = 1;
-    } else {
-      obj.i = 1;
-    }
-    return obj.i;
-  }
-
-  /// CHECK-START: int Main.test5(TestClass, boolean) load_store_elimination (before)
-  /// CHECK: InstanceFieldSet
-  /// CHECK: InstanceFieldGet
-  /// CHECK: Return
-  /// CHECK: InstanceFieldSet
-
-  /// CHECK-START: int Main.test5(TestClass, boolean) load_store_elimination (after)
-  /// CHECK: InstanceFieldSet
-  /// CHECK: InstanceFieldGet
-  /// CHECK: Return
-  /// CHECK: InstanceFieldSet
-
-  // Set and merge different values in two branches.
-  static int test5(TestClass obj, boolean b) {
-    if (b) {
-      obj.i = 1;
-    } else {
-      obj.i = 2;
-    }
-    return obj.i;
-  }
-
   /// CHECK-START: int Main.test6(TestClass, TestClass, boolean) load_store_elimination (before)
   /// CHECK: InstanceFieldSet
   /// CHECK: InstanceFieldSet
@@ -557,66 +514,6 @@
     return sum;
   }
 
-  /// CHECK-START: int Main.test23(boolean) load_store_elimination (before)
-  /// CHECK: NewInstance
-  /// CHECK: InstanceFieldSet
-  /// CHECK: InstanceFieldGet
-  /// CHECK: InstanceFieldSet
-  /// CHECK: InstanceFieldGet
-  /// CHECK: Return
-  /// CHECK: InstanceFieldGet
-  /// CHECK: InstanceFieldSet
-
-  /// CHECK-START: int Main.test23(boolean) load_store_elimination (after)
-  /// CHECK: NewInstance
-  /// CHECK-NOT: InstanceFieldSet
-  /// CHECK-NOT: InstanceFieldGet
-  /// CHECK: InstanceFieldSet
-  /// CHECK: InstanceFieldGet
-  /// CHECK: Return
-  /// CHECK-NOT: InstanceFieldGet
-  /// CHECK: InstanceFieldSet
-
-  // Test store elimination on merging.
-  static int test23(boolean b) {
-    TestClass obj = new TestClass();
-    obj.i = 3;      // This store can be eliminated since the value flows into each branch.
-    if (b) {
-      obj.i += 1;   // This store cannot be eliminated due to the merge later.
-    } else {
-      obj.i += 2;   // This store cannot be eliminated due to the merge later.
-    }
-    return obj.i;
-  }
-
-  /// CHECK-START: float Main.test24() load_store_elimination (before)
-  /// CHECK-DAG:     <<True:i\d+>>     IntConstant 1
-  /// CHECK-DAG:     <<Float8:f\d+>>   FloatConstant 8
-  /// CHECK-DAG:     <<Float42:f\d+>>  FloatConstant 42
-  /// CHECK-DAG:     <<Obj:l\d+>>      NewInstance
-  /// CHECK-DAG:                       InstanceFieldSet [<<Obj>>,<<True>>]
-  /// CHECK-DAG:                       InstanceFieldSet [<<Obj>>,<<Float8>>]
-  /// CHECK-DAG:     <<GetTest:z\d+>>  InstanceFieldGet [<<Obj>>]
-  /// CHECK-DAG:     <<GetField:f\d+>> InstanceFieldGet [<<Obj>>]
-  /// CHECK-DAG:     <<Select:f\d+>>   Select [<<Float42>>,<<GetField>>,<<GetTest>>]
-  /// CHECK-DAG:                       Return [<<Select>>]
-
-  /// CHECK-START: float Main.test24() load_store_elimination (after)
-  /// CHECK-DAG:     <<True:i\d+>>     IntConstant 1
-  /// CHECK-DAG:     <<Float8:f\d+>>   FloatConstant 8
-  /// CHECK-DAG:     <<Float42:f\d+>>  FloatConstant 42
-  /// CHECK-DAG:     <<Select:f\d+>>   Select [<<Float42>>,<<Float8>>,<<True>>]
-  /// CHECK-DAG:                       Return [<<Select>>]
-
-  static float test24() {
-    float a = 42.0f;
-    TestClass3 obj = new TestClass3();
-    if (obj.test1) {
-      a = obj.floatField;
-    }
-    return a;
-  }
-
   /// CHECK-START: void Main.testFinalizable() load_store_elimination (before)
   /// CHECK: NewInstance
   /// CHECK: InstanceFieldSet
@@ -1275,7 +1172,14 @@
     }
   }
 
-  public static void main(String[] args) {
+  public static void main(String[] args) throws Exception {
+
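+    // test4, test5, test23 and test24 have been moved to smali (Main2.smali),
+    // so they are resolved and invoked via reflection below.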
+    Class main2 = Class.forName("Main2");
+    Method test4 = main2.getMethod("test4", TestClass.class, boolean.class);
+    Method test5 = main2.getMethod("test5", TestClass.class, boolean.class);
+    Method test23 = main2.getMethod("test23", boolean.class);
+    Method test24 = main2.getMethod("test24");
+
     assertDoubleEquals(Math.PI * Math.PI * Math.PI, calcCircleArea(Math.PI));
     assertIntEquals(test1(new TestClass(), new TestClass()), 3);
     assertIntEquals(test2(new TestClass()), 1);
@@ -1283,10 +1187,10 @@
     TestClass obj2 = new TestClass();
     obj1.next = obj2;
     assertIntEquals(test3(obj1), 10);
-    assertIntEquals(test4(new TestClass(), true), 1);
-    assertIntEquals(test4(new TestClass(), false), 1);
-    assertIntEquals(test5(new TestClass(), true), 1);
-    assertIntEquals(test5(new TestClass(), false), 2);
+    assertIntEquals((int)test4.invoke(null, new TestClass(), true), 1);
+    assertIntEquals((int)test4.invoke(null, new TestClass(), false), 1);
+    assertIntEquals((int)test5.invoke(null, new TestClass(), true), 1);
+    assertIntEquals((int)test5.invoke(null, new TestClass(), false), 2);
     assertIntEquals(test6(new TestClass(), new TestClass(), true), 4);
     assertIntEquals(test6(new TestClass(), new TestClass(), false), 2);
     assertIntEquals(test7(new TestClass()), 1);
@@ -1312,9 +1216,9 @@
     assertFloatEquals(test20().i, 0);
     test21(new TestClass());
     assertIntEquals(test22(), 13);
-    assertIntEquals(test23(true), 4);
-    assertIntEquals(test23(false), 5);
-    assertFloatEquals(test24(), 8.0f);
+    assertIntEquals((int)test23.invoke(null, true), 4);
+    assertIntEquals((int)test23.invoke(null, false), 5);
+    assertFloatEquals((float)test24.invoke(null), 8.0f);
     testFinalizableByForcingGc();
     assertIntEquals($noinline$testHSelect(true), 0xdead);
     int[] array = {2, 5, 9, -1, -3, 10, 8, 4};
diff --git a/test/549-checker-types-merge/build b/test/549-checker-types-merge/build
deleted file mode 100644
index 10ffcc5..0000000
--- a/test/549-checker-types-merge/build
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See b/65168732
-export USE_D8=false
-
-./default-build "$@"
diff --git a/test/567-checker-compare/build b/test/567-checker-compare/build
deleted file mode 100644
index 10ffcc5..0000000
--- a/test/567-checker-compare/build
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See b/65168732
-export USE_D8=false
-
-./default-build "$@"
diff --git a/test/910-methods/build b/test/910-methods/build
deleted file mode 100644
index 10ffcc5..0000000
--- a/test/910-methods/build
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See b/65168732
-export USE_D8=false
-
-./default-build "$@"
diff --git a/tools/dexanalyze/dexanalyze.cc b/tools/dexanalyze/dexanalyze.cc
index 38725d4..7d7e5f2 100644
--- a/tools/dexanalyze/dexanalyze.cc
+++ b/tools/dexanalyze/dexanalyze.cc
@@ -49,6 +49,8 @@
         << "Usage " << argv[0] << " [options] <dex files>\n"
         << "    [options] is a combination of the following\n"
         << "    -count_indices (Count dex indices accessed from code items)\n"
+        << "    -analyze-strings (Analyze string data)\n"
+        << "    -analyze-debug-info (Analyze debug info)\n"
         << "    -i (Ignore Dex checksum and verification failures)\n"
         << "    -a (Run all experiments)\n"
         << "    -d (Dump on per DEX basis)\n";
@@ -69,6 +71,8 @@
           exp_count_indices_ = true;
         } else if (arg == "-analyze-strings") {
           exp_analyze_strings_ = true;
+        } else if (arg == "-analyze-debug-info") {
+          exp_debug_info_ = true;
         } else if (arg == "-d") {
           dump_per_input_dex_ = true;
         } else if (!arg.empty() && arg[0] == '-') {
@@ -90,6 +94,7 @@
     bool exp_count_indices_ = false;
     bool exp_code_metrics_ = false;
     bool exp_analyze_strings_ = false;
+    bool exp_debug_info_ = false;
     bool run_all_experiments_ = false;
     std::vector<std::string> filenames_;
   };
@@ -106,6 +111,9 @@
       if (options->run_all_experiments_ || options->exp_code_metrics_) {
         experiments_.emplace_back(new CodeMetrics);
       }
+      if (options->run_all_experiments_ || options->exp_debug_info_) {
+        experiments_.emplace_back(new AnalyzeDebugInfo);
+      }
     }
 
     bool ProcessDexFile(const DexFile& dex_file) {
@@ -120,6 +128,7 @@
     void Dump(std::ostream& os) {
       for (std::unique_ptr<Experiment>& experiment : experiments_) {
         experiment->Dump(os, total_size_);
+        os << "\n";
       }
     }
 
diff --git a/tools/dexanalyze/dexanalyze_experiments.cc b/tools/dexanalyze/dexanalyze_experiments.cc
index 7006370..1a3b89c 100644
--- a/tools/dexanalyze/dexanalyze_experiments.cc
+++ b/tools/dexanalyze/dexanalyze_experiments.cc
@@ -75,6 +75,128 @@
   return len;
 }
 
+void AnalyzeDebugInfo::ProcessDexFile(const DexFile& dex_file) {
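+  // Debug info streams are commonly shared between methods; `seen` makes sure
+  // each stream is only measured once, while `unique_non_header` tracks how
+  // many bytes would remain if the post-header payloads were deduplicated.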
+  std::set<const uint8_t*> seen;
+  std::vector<size_t> counts(256, 0u);
+  std::vector<size_t> opcode_counts(256, 0u);
+  std::set<std::vector<uint8_t>> unique_non_header;
+  for (ClassAccessor accessor : dex_file.GetClasses()) {
+    for (const ClassAccessor::Method& method : accessor.GetMethods()) {
+      CodeItemDebugInfoAccessor code_item(dex_file, method.GetCodeItem(), method.GetIndex());
+      const uint8_t* debug_info = dex_file.GetDebugInfoStream(code_item.DebugInfoOffset());
+      if (debug_info != nullptr && seen.insert(debug_info).second) {
+        const uint8_t* stream = debug_info;
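+        // A debug_info_item starts with a small header (line_start,
+        // parameters_size and one string index per parameter name), followed
+        // by the state machine opcodes measured below.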
+        DecodeUnsignedLeb128(&stream);  // line_start
+        uint32_t parameters_size = DecodeUnsignedLeb128(&stream);
+        for (uint32_t i = 0; i < parameters_size; ++i) {
+          DecodeUnsignedLeb128P1(&stream);  // Parameter name.
+        }
+        bool done = false;
+        const uint8_t* after_header_start = stream;
+        while (!done) {
+          const uint8_t* const op_start = stream;
+          uint8_t opcode = *stream++;
+          ++opcode_counts[opcode];
+          ++total_opcode_bytes_;
+          switch (opcode) {
+            case DexFile::DBG_END_SEQUENCE:
+              ++total_end_seq_bytes_;
+              done = true;
+              break;
+            case DexFile::DBG_ADVANCE_PC:
+              DecodeUnsignedLeb128(&stream);  // addr_diff
+              total_advance_pc_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_ADVANCE_LINE:
+              DecodeSignedLeb128(&stream);  // line_diff
+              total_advance_line_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_START_LOCAL:
+              DecodeUnsignedLeb128(&stream);  // register_num
+              DecodeUnsignedLeb128P1(&stream);  // name_idx
+              DecodeUnsignedLeb128P1(&stream);  // type_idx
+              total_start_local_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_START_LOCAL_EXTENDED:
+              DecodeUnsignedLeb128(&stream);  // register_num
+              DecodeUnsignedLeb128P1(&stream);  // name_idx
+              DecodeUnsignedLeb128P1(&stream);  // type_idx
+              DecodeUnsignedLeb128P1(&stream);  // sig_idx
+              total_start_local_extended_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_END_LOCAL:
+              DecodeUnsignedLeb128(&stream);  // register_num
+              total_end_local_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_RESTART_LOCAL:
+              DecodeUnsignedLeb128(&stream);  // register_num
+              total_restart_local_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_SET_PROLOGUE_END:
+            case DexFile::DBG_SET_EPILOGUE_BEGIN:
+              total_epilogue_bytes_ += stream - op_start;
+              break;
+            case DexFile::DBG_SET_FILE: {
+              DecodeUnsignedLeb128P1(&stream);  // name_idx
+              total_set_file_bytes_ += stream - op_start;
+              break;
+            }
+            default: {
+              total_other_bytes_ += stream - op_start;
+              break;
+            }
+          }
+        }
+        const size_t bytes = stream - debug_info;
+        total_bytes_ += bytes;
+        total_non_header_bytes_ += stream - after_header_start;
+        if (unique_non_header.insert(std::vector<uint8_t>(after_header_start, stream)).second) {
+          total_unique_non_header_bytes_ += stream - after_header_start;
+        }
+        for (size_t i = 0; i < bytes; ++i) {
+          ++counts[debug_info[i]];
+        }
+      }
+    }
+  }
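+  // Shannon entropy of the byte and opcode histograms, computed in base 256
+  // and scaled by the total count, i.e. an estimate of the smallest size in
+  // bytes an ideal entropy coder could achieve for the measured data.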
+  auto calc_entropy = [](std::vector<size_t> data) {
+    size_t total = std::accumulate(data.begin(), data.end(), 0u);
+    double avg_entropy = 0.0;
+    for (size_t c : data) {
+      if (c > 0) {
+        double ratio = static_cast<double>(c) / static_cast<double>(total);
+        avg_entropy -= ratio * log(ratio) / log(256.0);
+      }
+    }
+    return avg_entropy * total;
+  };
+  total_entropy_ += calc_entropy(counts);
+  total_opcode_entropy_ += calc_entropy(opcode_counts);
+}
+
+void AnalyzeDebugInfo::Dump(std::ostream& os, uint64_t total_size) const {
+  os << "Debug info bytes " << Percent(total_bytes_, total_size) << "\n";
+
+  os << "  DBG_END_SEQUENCE: " << Percent(total_end_seq_bytes_, total_size) << "\n";
+  os << "  DBG_ADVANCE_PC: " << Percent(total_advance_pc_bytes_, total_size) << "\n";
+  os << "  DBG_ADVANCE_LINE: " << Percent(total_advance_line_bytes_, total_size) << "\n";
+  os << "  DBG_START_LOCAL: " << Percent(total_start_local_bytes_, total_size) << "\n";
+  os << "  DBG_START_LOCAL_EXTENDED: "
+     << Percent(total_start_local_extended_bytes_, total_size) << "\n";
+  os << "  DBG_END_LOCAL: " << Percent(total_end_local_bytes_, total_size) << "\n";
+  os << "  DBG_RESTART_LOCAL: " << Percent(total_restart_local_bytes_, total_size) << "\n";
+  os << "  DBG_SET_PROLOGUE bytes " << Percent(total_epilogue_bytes_, total_size) << "\n";
+  os << "  DBG_SET_FILE bytes " << Percent(total_set_file_bytes_, total_size) << "\n";
+  os << "  special: "
+      << Percent(total_other_bytes_, total_size) << "\n";
+  os << "Debug info entropy " << Percent(total_entropy_, total_size) << "\n";
+  os << "Debug info opcode bytes " << Percent(total_opcode_bytes_, total_size) << "\n";
+  os << "Debug info opcode entropy " << Percent(total_opcode_entropy_, total_size) << "\n";
+  os << "Debug info non header bytes " << Percent(total_non_header_bytes_, total_size) << "\n";
+  os << "Debug info deduped non header bytes "
+     << Percent(total_unique_non_header_bytes_, total_size) << "\n";
+}
+
 void AnalyzeStrings::ProcessDexFile(const DexFile& dex_file) {
   std::vector<std::string> strings;
   for (size_t i = 0; i < dex_file.NumStringIds(); ++i) {
diff --git a/tools/dexanalyze/dexanalyze_experiments.h b/tools/dexanalyze/dexanalyze_experiments.h
index 7ba2a49..a2621c8 100644
--- a/tools/dexanalyze/dexanalyze_experiments.h
+++ b/tools/dexanalyze/dexanalyze_experiments.h
@@ -51,6 +51,32 @@
   int64_t total_num_prefixes_ = 0u;
 };
 
+// Analyze debug info sizes.
+class AnalyzeDebugInfo : public Experiment {
+ public:
+  void ProcessDexFile(const DexFile& dex_file);
+  void Dump(std::ostream& os, uint64_t total_size) const;
+
+ private:
+  int64_t total_bytes_ = 0u;
+  int64_t total_entropy_ = 0u;
+  int64_t total_opcode_bytes_ = 0u;
+  int64_t total_opcode_entropy_ = 0u;
+  int64_t total_non_header_bytes_ = 0u;
+  int64_t total_unique_non_header_bytes_ = 0u;
+  // Opcode and related data.
+  int64_t total_end_seq_bytes_ = 0u;
+  int64_t total_advance_pc_bytes_ = 0u;
+  int64_t total_advance_line_bytes_ = 0u;
+  int64_t total_start_local_bytes_ = 0u;
+  int64_t total_start_local_extended_bytes_ = 0u;
+  int64_t total_end_local_bytes_ = 0u;
+  int64_t total_restart_local_bytes_ = 0u;
+  int64_t total_epilogue_bytes_ = 0u;
+  int64_t total_set_file_bytes_ = 0u;
+  int64_t total_other_bytes_ = 0u;
+};
+
 // Count numbers of dex indices.
 class CountDexIndices : public Experiment {
  public:
diff --git a/tools/teardown-buildbot-device.sh b/tools/teardown-buildbot-device.sh
index df239a2..bf14ca4 100755
--- a/tools/teardown-buildbot-device.sh
+++ b/tools/teardown-buildbot-device.sh
@@ -25,6 +25,27 @@
 adb wait-for-device
 
 if [[ -n "$ART_TEST_CHROOT" ]]; then
+
+  # remove_filesystem_from_chroot DIR-IN-CHROOT FSTYPE REMOVE-DIR-IN-CHROOT
+  # -----------------------------------------------------------------------
+  # Unmount the filesystem of type FSTYPE mounted at DIR-IN-CHROOT under the
+  # chroot directory, and remove DIR-IN-CHROOT from the chroot if
+  # REMOVE-DIR-IN-CHROOT is true.
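+  # If unmounting fails, the open files under the directory are listed (via
+  # lsof) to help diagnose what is keeping it busy.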
+  remove_filesystem_from_chroot() {
+    local dir_in_chroot=$1
+    local fstype=$2
+    local remove_dir=$3
+    local dir="$ART_TEST_CHROOT/$dir_in_chroot"
+    adb shell test -d "$dir" \
+      && adb shell mount | grep -q "^$fstype on $dir type $fstype " \
+      && if adb shell umount "$dir"; then
+           $remove_dir && adb shell rmdir "$dir"
+         else
+           adb shell lsof "$dir"
+         fi
+  }
+
   # Tear down the chroot dir.
   echo -e "${green}Tear down the chroot dir in $ART_TEST_CHROOT${nc}"
 
@@ -32,22 +53,17 @@
   [[ "x$ART_TEST_CHROOT" = x/* ]] || { echo "$ART_TEST_CHROOT is not an absolute path"; exit 1; }
 
   # Remove /dev from chroot.
-  adb shell mount | grep -q "^tmpfs on $ART_TEST_CHROOT/dev type tmpfs " \
-    && adb shell umount "$ART_TEST_CHROOT/dev" \
-    && adb shell rmdir "$ART_TEST_CHROOT/dev"
+  remove_filesystem_from_chroot dev tmpfs true
 
   # Remove /sys/kernel/debug from chroot.
-  adb shell mount | grep -q "^debugfs on $ART_TEST_CHROOT/sys/kernel/debug type debugfs " \
-    && adb shell umount "$ART_TEST_CHROOT/sys/kernel/debug"
+  # The /sys/kernel/debug directory under the chroot dir cannot be
+  # deleted, as it is part of the host device's /sys filesystem.
+  remove_filesystem_from_chroot sys/kernel/debug debugfs false
   # Remove /sys from chroot.
-  adb shell mount | grep -q "^sysfs on $ART_TEST_CHROOT/sys type sysfs " \
-    && adb shell umount "$ART_TEST_CHROOT/sys" \
-    && adb shell rmdir "$ART_TEST_CHROOT/sys"
+  remove_filesystem_from_chroot sys sysfs true
 
   # Remove /proc from chroot.
-  adb shell mount | grep -q "^proc on $ART_TEST_CHROOT/proc type proc " \
-    && adb shell umount "$ART_TEST_CHROOT/proc" \
-    && adb shell rmdir "$ART_TEST_CHROOT/proc"
+  remove_filesystem_from_chroot proc proc true
 
   # Remove /etc from chroot.
   adb shell rm -f "$ART_TEST_CHROOT/etc"
@@ -65,6 +81,6 @@
     /plat_property_contexts \
     /nonplat_property_contexts"
   for f in $property_context_files; do
-    adb shell test -f "$f" "&&" rm -f "$ART_TEST_CHROOT$f"
+    adb shell rm -f "$ART_TEST_CHROOT$f"
   done
 fi
diff --git a/tools/veridex/Android.mk b/tools/veridex/Android.mk
index 83fa0d6..f8463c1 100644
--- a/tools/veridex/Android.mk
+++ b/tools/veridex/Android.mk
@@ -46,10 +46,19 @@
 
 $(VERIDEX_FILES_PATH): PRIVATE_VERIDEX_FILES := $(VERIDEX_FILES)
 $(VERIDEX_FILES_PATH): PRIVATE_APP_COMPAT_LISTS := $(app_compat_lists)
-$(VERIDEX_FILES_PATH) : $(SOONG_ZIP) $(VERIDEX_FILES) $(app_compat_lists) $(HOST_OUT_EXECUTABLES)/veridex
+$(VERIDEX_FILES_PATH): PRIVATE_SYSTEM_STUBS_ZIP := $(dir $(VERIDEX_FILES_PATH))/system-stubs.zip
+$(VERIDEX_FILES_PATH): PRIVATE_OAHL_STUBS_ZIP := $(dir $(VERIDEX_FILES_PATH))/org.apache.http.legacy-stubs.zip
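+# The stub dex files (classes*.dex) are first packed into system-stubs.zip and
+# org.apache.http.legacy-stubs.zip, then bundled into the veridex distribution
+# zip; appcompat.sh now expects these .zip names instead of single .dex files.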
+$(VERIDEX_FILES_PATH) : $(SOONG_ZIP) $(VERIDEX_FILES) $(app_compat_lists) $(HOST_OUT_EXECUTABLES)/veridex $(system_stub_dex) $(oahl_stub_dex)
+	$(hide) rm -f $(PRIVATE_SYSTEM_STUBS_ZIP) $(PRIVATE_OAHL_STUBS_ZIP)
+	$(hide) zip -j $(PRIVATE_SYSTEM_STUBS_ZIP) $(dir $(system_stub_dex))/classes*.dex
+	$(hide) zip -j $(PRIVATE_OAHL_STUBS_ZIP) $(dir $(oahl_stub_dex))/classes*.dex
 	$(hide) $(SOONG_ZIP) -o $@ -C art/tools/veridex -f $(PRIVATE_VERIDEX_FILES) \
                              -C $(dir $(lastword $(PRIVATE_APP_COMPAT_LISTS))) $(addprefix -f , $(PRIVATE_APP_COMPAT_LISTS)) \
-                             -C $(HOST_OUT_EXECUTABLES) -f $(HOST_OUT_EXECUTABLES)/veridex
+                             -C $(HOST_OUT_EXECUTABLES) -f $(HOST_OUT_EXECUTABLES)/veridex \
+                             -C $(dir $(PRIVATE_SYSTEM_STUBS_ZIP)) -f $(PRIVATE_SYSTEM_STUBS_ZIP) \
+                             -C $(dir $(PRIVATE_OAHL_STUBS_ZIP)) -f $(PRIVATE_OAHL_STUBS_ZIP)
+	$(hide) rm -f $(PRIVATE_SYSTEM_STUBS_ZIP)
+	$(hide) rm -f $(PRIVATE_OAHL_STUBS_ZIP)
 
 # Make the zip file available for prebuilts.
 $(call dist-for-goals,sdk,$(VERIDEX_FILES_PATH))
diff --git a/tools/veridex/appcompat.sh b/tools/veridex/appcompat.sh
index c07ab21..e7b735d 100755
--- a/tools/veridex/appcompat.sh
+++ b/tools/veridex/appcompat.sh
@@ -25,10 +25,10 @@
       -e ${SCRIPT_DIR}/hiddenapi-blacklist.txt && \
       -e ${SCRIPT_DIR}/hiddenapi-light-greylist.txt && \
       -e ${SCRIPT_DIR}/hiddenapi-dark-greylist.txt && \
-      -e ${SCRIPT_DIR}/org.apache.http.legacy-stubs.dex && \
-      -e ${SCRIPT_DIR}/system-stubs.dex ]]; then
+      -e ${SCRIPT_DIR}/org.apache.http.legacy-stubs.zip && \
+      -e ${SCRIPT_DIR}/system-stubs.zip ]]; then
   exec ${SCRIPT_DIR}/veridex \
-    --core-stubs=${SCRIPT_DIR}/system-stubs.dex:${SCRIPT_DIR}/org.apache.http.legacy-stubs.dex \
+    --core-stubs=${SCRIPT_DIR}/system-stubs.zip:${SCRIPT_DIR}/org.apache.http.legacy-stubs.zip \
     --blacklist=${SCRIPT_DIR}/hiddenapi-blacklist.txt \
     --light-greylist=${SCRIPT_DIR}/hiddenapi-light-greylist.txt \
     --dark-greylist=${SCRIPT_DIR}/hiddenapi-dark-greylist.txt \