Merge "[optimizing] trivial x86 explicit null check fix"
diff --git a/Android.mk b/Android.mk
index 216e865..9360355 100644
--- a/Android.mk
+++ b/Android.mk
@@ -42,27 +42,7 @@
 
 .PHONY: clean-oat-host
 clean-oat-host:
-	rm -f $(HOST_CORE_IMG_OUTS)
-	rm -f $(HOST_CORE_OAT_OUTS)
-	rm -f $(HOST_OUT_JAVA_LIBRARIES)/$(ART_HOST_ARCH)/*.odex
-ifneq ($(HOST_PREFER_32_BIT),true)
-	rm -f $(HOST_OUT_JAVA_LIBRARIES)/$(2ND_ART_HOST_ARCH)/*.odex
-endif
-	rm -f $(TARGET_CORE_IMG_OUTS)
-	rm -f $(TARGET_CORE_OAT_OUTS)
-	rm -rf $(DEXPREOPT_PRODUCT_DIR_FULL_PATH)
-	rm -f $(TARGET_OUT_UNSTRIPPED)/system/framework/*.odex
-	rm -f $(TARGET_OUT_UNSTRIPPED)/system/framework/*/*.oat
-	rm -f $(TARGET_OUT_UNSTRIPPED)/system/framework/*/*.art
-	rm -f $(TARGET_OUT)/framework/*/*.oat
-	rm -f $(TARGET_OUT)/framework/*/*.art
-	rm -f $(TARGET_OUT_APPS)/*.odex
-	rm -f $(TARGET_OUT_INTERMEDIATES)/JAVA_LIBRARIES/*_intermediates/javalib.odex
-	rm -f $(TARGET_OUT_INTERMEDIATES)/APPS/*_intermediates/*.odex
-ifdef TARGET_2ND_ARCH
-	rm -f $(2ND_TARGET_OUT_INTERMEDIATES)/JAVA_LIBRARIES/*_intermediates/javalib.odex
-	rm -f $(2ND_TARGET_OUT_INTERMEDIATES)/APPS/*_intermediates/*.odex
-endif
+	find $(OUT_DIR) -name "*.oat" -o -name "*.odex" -o -name "*.art" | xargs rm -f
 ifneq ($(TMPDIR),)
 	rm -rf $(TMPDIR)/$(USER)/test-*/dalvik-cache/*
 	rm -rf $(TMPDIR)/android-data/dalvik-cache/*
diff --git a/compiler/common_compiler_test.cc b/compiler/common_compiler_test.cc
index 257406a..1d0aad5 100644
--- a/compiler/common_compiler_test.cc
+++ b/compiler/common_compiler_test.cc
@@ -180,7 +180,6 @@
   callbacks_.reset(new QuickCompilerCallbacks(verification_results_.get(),
                                               method_inliner_map_.get(),
                                               CompilerCallbacks::CallbackMode::kCompileApp));
-  options->push_back(std::make_pair("compilercallbacks", callbacks_.get()));
 }
 
 void CommonCompilerTest::TearDown() {
diff --git a/compiler/common_compiler_test.h b/compiler/common_compiler_test.h
index 9cffbc8..d7b210d 100644
--- a/compiler/common_compiler_test.h
+++ b/compiler/common_compiler_test.h
@@ -78,7 +78,6 @@
   std::unique_ptr<CompilerOptions> compiler_options_;
   std::unique_ptr<VerificationResults> verification_results_;
   std::unique_ptr<DexFileToMethodInlinerMap> method_inliner_map_;
-  std::unique_ptr<CompilerCallbacks> callbacks_;
   std::unique_ptr<CompilerDriver> compiler_driver_;
   std::unique_ptr<CumulativeLogger> timer_;
   std::unique_ptr<const InstructionSetFeatures> instruction_set_features_;
diff --git a/compiler/oat_test.cc b/compiler/oat_test.cc
index d59ad6c..afd39e8 100644
--- a/compiler/oat_test.cc
+++ b/compiler/oat_test.cc
@@ -85,9 +85,6 @@
   compiler_options_.reset(new CompilerOptions);
   verification_results_.reset(new VerificationResults(compiler_options_.get()));
   method_inliner_map_.reset(new DexFileToMethodInlinerMap);
-  callbacks_.reset(new QuickCompilerCallbacks(verification_results_.get(),
-                                              method_inliner_map_.get(),
-                                              CompilerCallbacks::CallbackMode::kCompileApp));
   timer_.reset(new CumulativeLogger("Compilation times"));
   compiler_driver_.reset(new CompilerDriver(compiler_options_.get(),
                                             verification_results_.get(),
diff --git a/compiler/optimizing/boolean_simplifier.cc b/compiler/optimizing/boolean_simplifier.cc
index e9ca042..be432c5 100644
--- a/compiler/optimizing/boolean_simplifier.cc
+++ b/compiler/optimizing/boolean_simplifier.cc
@@ -59,7 +59,8 @@
       return new (allocator) HGreaterThan(lhs, rhs);
     } else if (cond->IsGreaterThan()) {
       return new (allocator) HLessThanOrEqual(lhs, rhs);
-    } else if (cond->IsGreaterThanOrEqual()) {
+    } else {
+      DCHECK(cond->IsGreaterThanOrEqual());
       return new (allocator) HLessThan(lhs, rhs);
     }
   } else if (cond->IsIntConstant()) {
@@ -70,10 +71,11 @@
       DCHECK(int_const->IsOne());
       return graph->GetIntConstant(0);
     }
+  } else {
+    // General case when 'cond' is another instruction of type boolean.
+    // Negate with 'cond == 0'.
+    return new (allocator) HEqual(cond, graph->GetIntConstant(0));
   }
-
-  LOG(FATAL) << "Instruction " << cond->DebugName() << " used as a condition";
-  UNREACHABLE();
 }
 
 void HBooleanSimplifier::Run() {
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 0f79d18..1f95041 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -2087,16 +2087,32 @@
 }
 
 void LocationsBuilderARM::VisitDiv(HDiv* div) {
-  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
-      ? LocationSummary::kCall
-      : LocationSummary::kNoCall;
+  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
+  if (div->GetResultType() == Primitive::kPrimLong) {
+    // pLdiv runtime call.
+    call_kind = LocationSummary::kCall;
+  } else if (div->GetResultType() == Primitive::kPrimInt &&
+             !codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
+    // pIdivmod runtime call.
+    call_kind = LocationSummary::kCall;
+  }
+
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
 
   switch (div->GetResultType()) {
     case Primitive::kPrimInt: {
-      locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RequiresRegister());
-      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
+        locations->SetInAt(0, Location::RequiresRegister());
+        locations->SetInAt(1, Location::RequiresRegister());
+        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+      } else {
+        InvokeRuntimeCallingConvention calling_convention;
+        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
+        //       we only need the former.
+        locations->SetOut(Location::RegisterLocation(R0));
+      }
       break;
     }
     case Primitive::kPrimLong: {
@@ -2129,9 +2145,18 @@
 
   switch (div->GetResultType()) {
     case Primitive::kPrimInt: {
-      __ sdiv(out.AsRegister<Register>(),
-              first.AsRegister<Register>(),
-              second.AsRegister<Register>());
+      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
+        __ sdiv(out.AsRegister<Register>(),
+                first.AsRegister<Register>(),
+                second.AsRegister<Register>());
+      } else {
+        InvokeRuntimeCallingConvention calling_convention;
+        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
+        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
+        DCHECK_EQ(R0, out.AsRegister<Register>());
+
+        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), div, div->GetDexPc(), nullptr);
+      }
       break;
     }
 
@@ -2169,17 +2194,32 @@
 
 void LocationsBuilderARM::VisitRem(HRem* rem) {
   Primitive::Type type = rem->GetResultType();
-  LocationSummary::CallKind call_kind = type == Primitive::kPrimInt
-      ? LocationSummary::kNoCall
-      : LocationSummary::kCall;
+
+  // Most remainders are implemented in the runtime.
+  LocationSummary::CallKind call_kind = LocationSummary::kCall;
+  if (rem->GetResultType() == Primitive::kPrimInt &&
+      codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
+    // Have hardware divide instruction for int, do it with three instructions.
+    call_kind = LocationSummary::kNoCall;
+  }
+
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
 
   switch (type) {
     case Primitive::kPrimInt: {
-      locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RequiresRegister());
-      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-      locations->AddTemp(Location::RequiresRegister());
+      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
+        locations->SetInAt(0, Location::RequiresRegister());
+        locations->SetInAt(1, Location::RequiresRegister());
+        locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+        locations->AddTemp(Location::RequiresRegister());
+      } else {
+        InvokeRuntimeCallingConvention calling_convention;
+        locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+        locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
+        // Note: divrem will compute both the quotient and the remainder as the pair R0 and R1, but
+        //       we only need the latter.
+        locations->SetOut(Location::RegisterLocation(R1));
+      }
       break;
     }
     case Primitive::kPrimLong: {
@@ -2224,16 +2264,25 @@
   Primitive::Type type = rem->GetResultType();
   switch (type) {
     case Primitive::kPrimInt: {
-      Register reg1 = first.AsRegister<Register>();
-      Register reg2 = second.AsRegister<Register>();
-      Register temp = locations->GetTemp(0).AsRegister<Register>();
+      if (codegen_->GetInstructionSetFeatures().HasDivideInstruction()) {
+        Register reg1 = first.AsRegister<Register>();
+        Register reg2 = second.AsRegister<Register>();
+        Register temp = locations->GetTemp(0).AsRegister<Register>();
 
-      // temp = reg1 / reg2  (integer division)
-      // temp = temp * reg2
-      // dest = reg1 - temp
-      __ sdiv(temp, reg1, reg2);
-      __ mul(temp, temp, reg2);
-      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
+        // temp = reg1 / reg2  (integer division)
+        // temp = temp * reg2
+        // dest = reg1 - temp
+        __ sdiv(temp, reg1, reg2);
+        __ mul(temp, temp, reg2);
+        __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
+      } else {
+        InvokeRuntimeCallingConvention calling_convention;
+        DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegister<Register>());
+        DCHECK_EQ(calling_convention.GetRegisterAt(1), second.AsRegister<Register>());
+        DCHECK_EQ(R1, out.AsRegister<Register>());
+
+        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pIdivmod), rem, rem->GetDexPc(), nullptr);
+      }
       break;
     }
 
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 4b990f1..2c17a67 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -49,7 +49,8 @@
     for (HInstruction* instruction = block->GetFirstInstruction(); instruction != nullptr;) {
       HInstruction* next = instruction->GetNext();
       HInvokeStaticOrDirect* call = instruction->AsInvokeStaticOrDirect();
-      if (call != nullptr) {
+      // As long as the call is not intrinsified, it is worth trying to inline.
+      if (call != nullptr && call->GetIntrinsic() == Intrinsics::kNone) {
         // We use the original invoke type to ensure the resolution of the called method
         // works properly.
         if (!TryInline(call, call->GetDexMethodIndex(), call->GetOriginalInvokeType())) {
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 36cf856..628a844 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -191,8 +191,10 @@
     case kIntrinsicCompareTo:
       return Intrinsics::kStringCompareTo;
     case kIntrinsicIsEmptyOrLength:
-      return ((method.d.data & kIntrinsicFlagIsEmpty) == 0) ?
-          Intrinsics::kStringLength : Intrinsics::kStringIsEmpty;
+      // The inliner can handle these two cases - and this is the preferred approach
+      // since after inlining the call is no longer visible (as opposed to waiting
+      // until codegen to handle intrinsic).
+      return Intrinsics::kNone;
     case kIntrinsicIndexOf:
       return ((method.d.data & kIntrinsicFlagBase0) == 0) ?
           Intrinsics::kStringIndexOfAfter : Intrinsics::kStringIndexOf;
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 0f6978d..33176f0 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -903,8 +903,6 @@
 UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
 UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
 UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
-UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyways, inlining should
-UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
 UNIMPLEMENTED_INTRINSIC(StringIndexOf)
 UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
 UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 5f78933..72d303c 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -1030,8 +1030,6 @@
 }
 
 UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
-UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyways, inlining should
-UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
 UNIMPLEMENTED_INTRINSIC(StringIndexOf)
 UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
 UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
diff --git a/compiler/optimizing/intrinsics_list.h b/compiler/optimizing/intrinsics_list.h
index 9cc77c6..10f6e1d 100644
--- a/compiler/optimizing/intrinsics_list.h
+++ b/compiler/optimizing/intrinsics_list.h
@@ -60,10 +60,8 @@
   V(MemoryPokeShortNative, kStatic) \
   V(StringCharAt, kDirect) \
   V(StringCompareTo, kDirect) \
-  V(StringIsEmpty, kDirect) \
   V(StringIndexOf, kDirect) \
   V(StringIndexOfAfter, kDirect) \
-  V(StringLength, kDirect) \
   V(UnsafeCASInt, kDirect) \
   V(UnsafeCASLong, kDirect) \
   V(UnsafeCASObject, kDirect) \
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 6b7e4ae..384737f 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1196,8 +1196,6 @@
 UNIMPLEMENTED_INTRINSIC(MathRint)
 UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
 UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
-UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyways, inlining should
-UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
 UNIMPLEMENTED_INTRINSIC(StringIndexOf)
 UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
 UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 05ea906..736cea8 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1014,8 +1014,6 @@
 UNIMPLEMENTED_INTRINSIC(MathRint)
 UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
 UNIMPLEMENTED_INTRINSIC(MathRoundFloat)
-UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyways, inlining should
-UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
 UNIMPLEMENTED_INTRINSIC(StringIndexOf)
 UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
 UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
diff --git a/compiler/optimizing/register_allocator.cc b/compiler/optimizing/register_allocator.cc
index efb98f0..cf38bd3 100644
--- a/compiler/optimizing/register_allocator.cc
+++ b/compiler/optimizing/register_allocator.cc
@@ -213,7 +213,7 @@
           LiveInterval* interval =
               LiveInterval::MakeTempInterval(allocator_, Primitive::kPrimInt);
           temp_intervals_.Add(interval);
-          interval->AddRange(position, position + 1);
+          interval->AddTempUse(instruction, i);
           unhandled_core_intervals_.Add(interval);
           break;
         }
@@ -222,7 +222,7 @@
           LiveInterval* interval =
               LiveInterval::MakeTempInterval(allocator_, Primitive::kPrimDouble);
           temp_intervals_.Add(interval);
-          interval->AddRange(position, position + 1);
+          interval->AddTempUse(instruction, i);
           if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
             interval->AddHighInterval(true);
             LiveInterval* high = interval->GetHighInterval();
@@ -1681,8 +1681,6 @@
   }
 
   // Assign temp locations.
-  HInstruction* current = nullptr;
-  size_t temp_index = 0;
   for (size_t i = 0; i < temp_intervals_.Size(); ++i) {
     LiveInterval* temp = temp_intervals_.Get(i);
     if (temp->IsHighInterval()) {
@@ -1690,25 +1688,20 @@
       continue;
     }
     HInstruction* at = liveness_.GetTempUser(temp);
-    if (at != current) {
-      temp_index = 0;
-      current = at;
-    }
+    size_t temp_index = liveness_.GetTempIndex(temp);
     LocationSummary* locations = at->GetLocations();
     switch (temp->GetType()) {
       case Primitive::kPrimInt:
-        locations->SetTempAt(
-            temp_index++, Location::RegisterLocation(temp->GetRegister()));
+        locations->SetTempAt(temp_index, Location::RegisterLocation(temp->GetRegister()));
         break;
 
       case Primitive::kPrimDouble:
         if (codegen_->NeedsTwoRegisters(Primitive::kPrimDouble)) {
           Location location = Location::FpuRegisterPairLocation(
               temp->GetRegister(), temp->GetHighInterval()->GetRegister());
-          locations->SetTempAt(temp_index++, location);
+          locations->SetTempAt(temp_index, location);
         } else {
-          locations->SetTempAt(
-              temp_index++, Location::FpuRegisterLocation(temp->GetRegister()));
+          locations->SetTempAt(temp_index, Location::FpuRegisterLocation(temp->GetRegister()));
         }
         break;
 
diff --git a/compiler/optimizing/ssa_liveness_analysis.cc b/compiler/optimizing/ssa_liveness_analysis.cc
index 56ccd71..0f3973e 100644
--- a/compiler/optimizing/ssa_liveness_analysis.cc
+++ b/compiler/optimizing/ssa_liveness_analysis.cc
@@ -318,6 +318,8 @@
 
 int LiveInterval::FindFirstRegisterHint(size_t* free_until) const {
   DCHECK(!IsHighInterval());
+  if (IsTemp()) return kNoRegister;
+
   if (GetParent() == this && defined_by_ != nullptr) {
     // This is the first interval for the instruction. Try to find
     // a register based on its definition.
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index b57029d..bc78dc2 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -180,6 +180,15 @@
   // This interval is the result of a split.
   bool IsSplit() const { return parent_ != this; }
 
+  void AddTempUse(HInstruction* instruction, size_t temp_index) {
+    DCHECK(IsTemp());
+    DCHECK(first_use_ == nullptr) << "A temporary can only have one user";
+    size_t position = instruction->GetLifetimePosition();
+    first_use_ = new (allocator_) UsePosition(
+        instruction, temp_index, /* is_environment */ false, position, first_use_);
+    AddRange(position, position + 1);
+  }
+
   void AddUse(HInstruction* instruction, size_t input_index, bool is_environment) {
     // Set the use within the instruction.
     size_t position = instruction->GetLifetimePosition() + 1;
@@ -856,7 +865,15 @@
   HInstruction* GetTempUser(LiveInterval* temp) const {
     // A temporary shares the same lifetime start as the instruction that requires it.
     DCHECK(temp->IsTemp());
-    return GetInstructionFromPosition(temp->GetStart() / 2);
+    HInstruction* user = GetInstructionFromPosition(temp->GetStart() / 2);
+    DCHECK_EQ(user, temp->GetFirstUse()->GetUser());
+    return user;
+  }
+
+  size_t GetTempIndex(LiveInterval* temp) const {
+    // We use the input index to store the index of the temporary in the user's temporary list.
+    DCHECK(temp->IsTemp());
+    return temp->GetFirstUse()->GetInputIndex();
   }
 
   size_t GetMaxLifetimePosition() const {
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 5818a37..a73c8d7 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -27,6 +27,32 @@
 
 namespace art {
 
+// Helper to build art::StackMapStream::LocationCatalogEntriesIndices.
+class LocationCatalogEntriesIndicesEmptyFn {
+ public:
+  void MakeEmpty(std::pair<DexRegisterLocation, size_t>& item) const {
+    item.first = DexRegisterLocation::None();
+  }
+  bool IsEmpty(const std::pair<DexRegisterLocation, size_t>& item) const {
+    return item.first == DexRegisterLocation::None();
+  }
+};
+
+// Hash function for art::StackMapStream::LocationCatalogEntriesIndices.
+// This hash function does not create collisions.
+class DexRegisterLocationHashFn {
+ public:
+  size_t operator()(DexRegisterLocation key) const {
+    // Concatenate `key`s fields to create a 64-bit value to be hashed.
+    int64_t kind_and_value =
+        (static_cast<int64_t>(key.kind_) << 32) | static_cast<int64_t>(key.value_);
+    return inner_hash_fn_(kind_and_value);
+  }
+ private:
+  std::hash<int64_t> inner_hash_fn_;
+};
+
+
 /**
  * Collects and builds stack maps for a method. All the stack maps
  * for a method are placed in a CodeInfo object.
@@ -36,11 +62,13 @@
   explicit StackMapStream(ArenaAllocator* allocator)
       : allocator_(allocator),
         stack_maps_(allocator, 10),
+        location_catalog_entries_(allocator, 4),
         dex_register_locations_(allocator, 10 * 4),
         inline_infos_(allocator, 2),
         stack_mask_max_(-1),
         dex_pc_max_(0),
         native_pc_offset_max_(0),
+        register_mask_max_(0),
         number_of_stack_maps_with_inline_info_(0),
         dex_map_hash_to_stack_map_indices_(std::less<uint32_t>(), allocator->Adapter()) {}
 
@@ -101,6 +129,7 @@
 
     dex_pc_max_ = std::max(dex_pc_max_, dex_pc);
     native_pc_offset_max_ = std::max(native_pc_offset_max_, native_pc_offset);
+    register_mask_max_ = std::max(register_mask_max_, register_mask);
   }
 
   void AddInlineInfoEntry(uint32_t method_index) {
@@ -111,6 +140,7 @@
 
   size_t ComputeNeededSize() {
     size_t size = CodeInfo::kFixedSize
+        + ComputeDexRegisterLocationCatalogSize()
         + ComputeStackMapsSize()
         + ComputeDexRegisterMapsSize()
         + ComputeInlineInfoSize();
@@ -128,24 +158,43 @@
         ComputeInlineInfoSize(),
         ComputeDexRegisterMapsSize(),
         dex_pc_max_,
-        native_pc_offset_max_);
+        native_pc_offset_max_,
+        register_mask_max_);
   }
 
-  // Compute the size of the Dex register map of `entry`.
+  // Compute the size of the Dex register location catalog of `entry`.
+  size_t ComputeDexRegisterLocationCatalogSize() const {
+    size_t size = DexRegisterLocationCatalog::kFixedSize;
+    for (size_t location_catalog_entry_index = 0;
+         location_catalog_entry_index < location_catalog_entries_.Size();
+         ++location_catalog_entry_index) {
+      DexRegisterLocation dex_register_location =
+          location_catalog_entries_.Get(location_catalog_entry_index);
+      size += DexRegisterLocationCatalog::EntrySize(dex_register_location);
+    }
+    return size;
+  }
+
   size_t ComputeDexRegisterMapSize(const StackMapEntry& entry) const {
+    // Size of the map in bytes.
     size_t size = DexRegisterMap::kFixedSize;
-    // Add the bit mask for the dex register liveness.
-    size += DexRegisterMap::LiveBitMaskSize(entry.num_dex_registers);
-    for (size_t dex_register_number = 0, index_in_dex_register_locations = 0;
+    // Add the live bit mask for the Dex register liveness.
+    size += DexRegisterMap::GetLiveBitMaskSize(entry.num_dex_registers);
+    // Compute the size of the set of live Dex register entries.
+    size_t number_of_live_dex_registers = 0;
+    for (size_t dex_register_number = 0;
          dex_register_number < entry.num_dex_registers;
          ++dex_register_number) {
       if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) {
-        DexRegisterLocation dex_register_location = dex_register_locations_.Get(
-            entry.dex_register_locations_start_index + index_in_dex_register_locations);
-        size += DexRegisterMap::EntrySize(dex_register_location);
-        index_in_dex_register_locations++;
+        ++number_of_live_dex_registers;
       }
     }
+    size_t map_entries_size_in_bits =
+        DexRegisterMap::SingleEntrySizeInBits(location_catalog_entries_.Size())
+        * number_of_live_dex_registers;
+    size_t map_entries_size_in_bytes =
+        RoundUp(map_entries_size_in_bits, kBitsPerByte) / kBitsPerByte;
+    size += map_entries_size_in_bytes;
     return size;
   }
 
@@ -168,8 +217,16 @@
       + (number_of_stack_maps_with_inline_info_ * InlineInfo::kFixedSize);
   }
 
+  size_t ComputeDexRegisterLocationCatalogStart() const {
+    return CodeInfo::kFixedSize;
+  }
+
+  size_t ComputeStackMapsStart() const {
+    return ComputeDexRegisterLocationCatalogStart() + ComputeDexRegisterLocationCatalogSize();
+  }
+
   size_t ComputeDexRegisterMapsStart() {
-    return CodeInfo::kFixedSize + ComputeStackMapsSize();
+    return ComputeStackMapsStart() + ComputeStackMapsSize();
   }
 
   size_t ComputeInlineInfoStart() {
@@ -194,11 +251,32 @@
       ComputeInlineInfoStart(),
       inline_info_size);
 
-    code_info.SetEncoding(
-        inline_info_size, dex_register_map_size, dex_pc_max_, native_pc_offset_max_);
+    code_info.SetEncoding(inline_info_size,
+                          dex_register_map_size,
+                          dex_pc_max_,
+                          native_pc_offset_max_,
+                          register_mask_max_);
     code_info.SetNumberOfStackMaps(stack_maps_.Size());
     code_info.SetStackMaskSize(stack_mask_size);
-    DCHECK_EQ(code_info.StackMapsSize(), ComputeStackMapsSize());
+    DCHECK_EQ(code_info.GetStackMapsSize(), ComputeStackMapsSize());
+
+    // Set the Dex register location catalog.
+    code_info.SetNumberOfDexRegisterLocationCatalogEntries(
+        location_catalog_entries_.Size());
+    MemoryRegion dex_register_location_catalog_region = region.Subregion(
+        ComputeDexRegisterLocationCatalogStart(),
+        ComputeDexRegisterLocationCatalogSize());
+    DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
+    // Offset in `dex_register_location_catalog` where to store the next
+    // register location.
+    size_t location_catalog_offset = DexRegisterLocationCatalog::kFixedSize;
+    for (size_t i = 0, e = location_catalog_entries_.Size(); i < e; ++i) {
+      DexRegisterLocation dex_register_location = location_catalog_entries_.Get(i);
+      dex_register_location_catalog.SetRegisterInfo(location_catalog_offset, dex_register_location);
+      location_catalog_offset += DexRegisterLocationCatalog::EntrySize(dex_register_location);
+    }
+    // Ensure we reached the end of the Dex registers location_catalog.
+    DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());
 
     uintptr_t next_dex_register_map_offset = 0;
     uintptr_t next_inline_info_offset = 0;
@@ -234,25 +312,25 @@
           stack_map.SetDexRegisterMapOffset(
             code_info, register_region.start() - dex_register_locations_region.start());
 
-          // Offset in `dex_register_map` where to store the next register entry.
-          size_t offset = DexRegisterMap::kFixedSize;
-          dex_register_map.SetLiveBitMask(offset,
-                                          entry.num_dex_registers,
-                                          *entry.live_dex_registers_mask);
-          offset += DexRegisterMap::LiveBitMaskSize(entry.num_dex_registers);
+          // Set the live bit mask.
+          dex_register_map.SetLiveBitMask(entry.num_dex_registers, *entry.live_dex_registers_mask);
+
+          // Set the dex register location mapping data.
           for (size_t dex_register_number = 0, index_in_dex_register_locations = 0;
                dex_register_number < entry.num_dex_registers;
                ++dex_register_number) {
             if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) {
-              DexRegisterLocation dex_register_location = dex_register_locations_.Get(
-                  entry.dex_register_locations_start_index + index_in_dex_register_locations);
-              dex_register_map.SetRegisterInfo(offset, dex_register_location);
-              offset += DexRegisterMap::EntrySize(dex_register_location);
+              size_t location_catalog_entry_index =
+                  dex_register_locations_.Get(entry.dex_register_locations_start_index
+                                              + index_in_dex_register_locations);
+              dex_register_map.SetLocationCatalogEntryIndex(
+                  index_in_dex_register_locations,
+                  location_catalog_entry_index,
+                  entry.num_dex_registers,
+                  location_catalog_entries_.Size());
               ++index_in_dex_register_locations;
             }
           }
-          // Ensure we reached the end of the Dex registers region.
-          DCHECK_EQ(offset, register_region.size());
         }
       }
 
@@ -282,12 +360,31 @@
   }
 
   void AddDexRegisterEntry(uint16_t dex_register, DexRegisterLocation::Kind kind, int32_t value) {
+    StackMapEntry entry = stack_maps_.Get(stack_maps_.Size() - 1);
+    DCHECK_LT(dex_register, entry.num_dex_registers);
+
     if (kind != DexRegisterLocation::Kind::kNone) {
       // Ensure we only use non-compressed location kind at this stage.
       DCHECK(DexRegisterLocation::IsShortLocationKind(kind))
           << DexRegisterLocation::PrettyDescriptor(kind);
-      dex_register_locations_.Add(DexRegisterLocation(kind, value));
-      StackMapEntry entry = stack_maps_.Get(stack_maps_.Size() - 1);
+      DexRegisterLocation location(kind, value);
+
+      // Look for Dex register `location` in the location catalog (using the
+      // companion hash map of locations to indices).  Use its index if it
+      // is already in the location catalog.  If not, insert it (in the
+      // location catalog and the hash map) and use the newly created index.
+      auto it = location_catalog_entries_indices_.Find(location);
+      if (it != location_catalog_entries_indices_.end()) {
+        // Retrieve the index from the hash map.
+        dex_register_locations_.Add(it->second);
+      } else {
+        // Create a new entry in the location catalog and the hash map.
+        size_t index = location_catalog_entries_.Size();
+        location_catalog_entries_.Add(location);
+        dex_register_locations_.Add(index);
+        location_catalog_entries_indices_.Insert(std::make_pair(location, index));
+      }
+
       entry.live_dex_registers_mask->SetBit(dex_register);
       entry.dex_register_map_hash += (1 << dex_register);
       entry.dex_register_map_hash += static_cast<uint32_t>(value);
@@ -354,9 +451,9 @@
         return false;
       }
       if (a.live_dex_registers_mask->IsBitSet(i)) {
-        DexRegisterLocation a_loc = dex_register_locations_.Get(
+        size_t a_loc = dex_register_locations_.Get(
             a.dex_register_locations_start_index + index_in_dex_register_locations);
-        DexRegisterLocation b_loc = dex_register_locations_.Get(
+        size_t b_loc = dex_register_locations_.Get(
             b.dex_register_locations_start_index + index_in_dex_register_locations);
         if (a_loc != b_loc) {
           return false;
@@ -369,21 +466,29 @@
 
   ArenaAllocator* allocator_;
   GrowableArray<StackMapEntry> stack_maps_;
-  GrowableArray<DexRegisterLocation> dex_register_locations_;
+
+  // A catalog of unique [location_kind, register_value] pairs (per method).
+  GrowableArray<DexRegisterLocation> location_catalog_entries_;
+  // Map from Dex register location catalog entries to their indices in the
+  // location catalog.
+  typedef HashMap<DexRegisterLocation, size_t, LocationCatalogEntriesIndicesEmptyFn,
+                  DexRegisterLocationHashFn> LocationCatalogEntriesIndices;
+  LocationCatalogEntriesIndices location_catalog_entries_indices_;
+
+  // A set of concatenated maps of Dex register location indices
+  // pointing into `location_catalog_entries_`.
+  GrowableArray<size_t> dex_register_locations_;
   GrowableArray<InlineInfoEntry> inline_infos_;
   int stack_mask_max_;
   uint32_t dex_pc_max_;
   uint32_t native_pc_offset_max_;
+  uint32_t register_mask_max_;
   size_t number_of_stack_maps_with_inline_info_;
 
   ArenaSafeMap<uint32_t, GrowableArray<uint32_t>> dex_map_hash_to_stack_map_indices_;
 
   static constexpr uint32_t kNoSameDexMapFound = -1;
 
-  ART_FRIEND_TEST(StackMapTest, Test1);
-  ART_FRIEND_TEST(StackMapTest, Test2);
-  ART_FRIEND_TEST(StackMapTest, TestNonLiveDexRegisters);
-
   DISALLOW_COPY_AND_ASSIGN(StackMapStream);
 };
 
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index e5a9790..8d160bc 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -31,6 +31,8 @@
   return true;
 }
 
+using Kind = DexRegisterLocation::Kind;
+
 TEST(StackMapTest, Test1) {
   ArenaPool pool;
   ArenaAllocator arena(&pool);
@@ -39,8 +41,8 @@
   ArenaBitVector sp_mask(&arena, 0, false);
   size_t number_of_dex_registers = 2;
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kInStack, 0);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kConstant, -2);
+  stream.AddDexRegisterEntry(0, Kind::kInStack, 0);         // Short location.
+  stream.AddDexRegisterEntry(1, Kind::kConstant, -2);       // Large location.
 
   size_t size = stream.ComputeNeededSize();
   void* memory = arena.Alloc(size, kArenaAllocMisc);
@@ -51,6 +53,16 @@
   ASSERT_EQ(0u, code_info.GetStackMaskSize());
   ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
 
+  uint32_t number_of_location_catalog_entries =
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries();
+  ASSERT_EQ(2u, number_of_location_catalog_entries);
+  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
+  // The Dex register location catalog contains:
+  // - one 1-byte short Dex register location, and
+  // - one 5-byte large Dex register location.
+  size_t expected_location_catalog_size = 1u + 5u;
+  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
+
   StackMap stack_map = code_info.GetStackMapAt(0);
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
   ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
@@ -62,14 +74,40 @@
   ASSERT_TRUE(SameBits(stack_mask, sp_mask));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
-  DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
-  ASSERT_EQ(7u, dex_registers.Size());
-  DexRegisterLocation location0 = dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
-  DexRegisterLocation location1 = dex_registers.GetLocationKindAndValue(1, number_of_dex_registers);
-  ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetKind());
-  ASSERT_EQ(DexRegisterLocation::Kind::kConstant, location1.GetKind());
-  ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetInternalKind());
-  ASSERT_EQ(DexRegisterLocation::Kind::kConstantLargeValue, location1.GetInternalKind());
+  DexRegisterMap dex_register_map =
+      code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+  ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
+  ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
+  ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
+  // The Dex register map contains:
+  // - one 1-byte live bit mask, and
+  // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
+  size_t expected_dex_register_map_size = 1u + 1u;
+  ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+
+  ASSERT_EQ(Kind::kInStack,
+            dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info));
+  ASSERT_EQ(Kind::kConstant,
+            dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info));
+  ASSERT_EQ(Kind::kInStack,
+            dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info));
+  ASSERT_EQ(Kind::kConstantLargeValue,
+            dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info));
+  ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info));
+  ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info));
+
+  size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
+      0, number_of_dex_registers, number_of_location_catalog_entries);
+  size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(
+      1, number_of_dex_registers, number_of_location_catalog_entries);
+  ASSERT_EQ(0u, index0);
+  ASSERT_EQ(1u, index1);
+  DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
+  DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+  ASSERT_EQ(Kind::kInStack, location0.GetKind());
+  ASSERT_EQ(Kind::kConstant, location1.GetKind());
+  ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
+  ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
   ASSERT_EQ(0, location0.GetValue());
   ASSERT_EQ(-2, location1.GetValue());
 
@@ -86,8 +124,8 @@
   sp_mask1.SetBit(4);
   size_t number_of_dex_registers = 2;
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kInStack, 0);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kConstant, -2);
+  stream.AddDexRegisterEntry(0, Kind::kInStack, 0);         // Short location.
+  stream.AddDexRegisterEntry(1, Kind::kConstant, -2);       // Large location.
   stream.AddInlineInfoEntry(42);
   stream.AddInlineInfoEntry(82);
 
@@ -95,8 +133,8 @@
   sp_mask2.SetBit(3);
   sp_mask1.SetBit(8);
   stream.AddStackMapEntry(1, 128, 0xFF, &sp_mask2, number_of_dex_registers, 0);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kInRegister, 18);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kInFpuRegister, 3);
+  stream.AddDexRegisterEntry(0, Kind::kInRegister, 18);     // Short location.
+  stream.AddDexRegisterEntry(1, Kind::kInFpuRegister, 3);   // Short location.
 
   size_t size = stream.ComputeNeededSize();
   void* memory = arena.Alloc(size, kArenaAllocMisc);
@@ -107,6 +145,16 @@
   ASSERT_EQ(1u, code_info.GetStackMaskSize());
   ASSERT_EQ(2u, code_info.GetNumberOfStackMaps());
 
+  uint32_t number_of_location_catalog_entries =
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries();
+  ASSERT_EQ(4u, number_of_location_catalog_entries);
+  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
+  // The Dex register location catalog contains:
+  // - three 1-byte short Dex register locations, and
+  // - one 5-byte large Dex register location.
+  size_t expected_location_catalog_size = 3u * 1u + 5u;
+  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
+
   // First stack map.
   {
     StackMap stack_map = code_info.GetStackMapAt(0);
@@ -120,17 +168,40 @@
     ASSERT_TRUE(SameBits(stack_mask, sp_mask1));
 
     ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
-    DexRegisterMap dex_registers =
+    DexRegisterMap dex_register_map =
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
-    ASSERT_EQ(7u, dex_registers.Size());
-    DexRegisterLocation location0 =
-        dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
-    DexRegisterLocation location1 =
-        dex_registers.GetLocationKindAndValue(1, number_of_dex_registers);
-    ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetKind());
-    ASSERT_EQ(DexRegisterLocation::Kind::kConstant, location1.GetKind());
-    ASSERT_EQ(DexRegisterLocation::Kind::kInStack, location0.GetInternalKind());
-    ASSERT_EQ(DexRegisterLocation::Kind::kConstantLargeValue, location1.GetInternalKind());
+    ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
+    ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
+    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
+    // The Dex register map contains:
+    // - one 1-byte live bit mask, and
+    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
+    size_t expected_dex_register_map_size = 1u + 1u;
+    ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+
+    ASSERT_EQ(Kind::kInStack,
+              dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info));
+    ASSERT_EQ(Kind::kConstant,
+              dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info));
+    ASSERT_EQ(Kind::kInStack,
+              dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info));
+    ASSERT_EQ(Kind::kConstantLargeValue,
+              dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info));
+    ASSERT_EQ(0, dex_register_map.GetStackOffsetInBytes(0, number_of_dex_registers, code_info));
+    ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info));
+
+    size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
+        0, number_of_dex_registers, number_of_location_catalog_entries);
+    size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(
+        1, number_of_dex_registers, number_of_location_catalog_entries);
+    ASSERT_EQ(0u, index0);
+    ASSERT_EQ(1u, index1);
+    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
+    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    ASSERT_EQ(Kind::kInStack, location0.GetKind());
+    ASSERT_EQ(Kind::kConstant, location1.GetKind());
+    ASSERT_EQ(Kind::kInStack, location0.GetInternalKind());
+    ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
     ASSERT_EQ(0, location0.GetValue());
     ASSERT_EQ(-2, location1.GetValue());
 
@@ -154,17 +225,40 @@
     ASSERT_TRUE(SameBits(stack_mask, sp_mask2));
 
     ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
-    DexRegisterMap dex_registers =
+    DexRegisterMap dex_register_map =
         code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
-    ASSERT_EQ(3u, dex_registers.Size());
-    DexRegisterLocation location0 =
-        dex_registers.GetLocationKindAndValue(0, number_of_dex_registers);
-    DexRegisterLocation location1 =
-        dex_registers.GetLocationKindAndValue(1, number_of_dex_registers);
-    ASSERT_EQ(DexRegisterLocation::Kind::kInRegister, location0.GetKind());
-    ASSERT_EQ(DexRegisterLocation::Kind::kInFpuRegister, location1.GetKind());
-    ASSERT_EQ(DexRegisterLocation::Kind::kInRegister, location0.GetInternalKind());
-    ASSERT_EQ(DexRegisterLocation::Kind::kInFpuRegister, location1.GetInternalKind());
+    ASSERT_TRUE(dex_register_map.IsDexRegisterLive(0));
+    ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
+    ASSERT_EQ(2u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
+    // The Dex register map contains:
+    // - one 1-byte live bit mask, and
+    // - one 1-byte set of location catalog entry indices composed of two 2-bit values.
+    size_t expected_dex_register_map_size = 1u + 1u;
+    ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+
+    ASSERT_EQ(Kind::kInRegister,
+              dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info));
+    ASSERT_EQ(Kind::kInFpuRegister,
+              dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info));
+    ASSERT_EQ(Kind::kInRegister,
+              dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info));
+    ASSERT_EQ(Kind::kInFpuRegister,
+              dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info));
+    ASSERT_EQ(18, dex_register_map.GetMachineRegister(0, number_of_dex_registers, code_info));
+    ASSERT_EQ(3, dex_register_map.GetMachineRegister(1, number_of_dex_registers, code_info));
+
+    size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
+        0, number_of_dex_registers, number_of_location_catalog_entries);
+    size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(
+        1, number_of_dex_registers, number_of_location_catalog_entries);
+    ASSERT_EQ(2u, index0);
+    ASSERT_EQ(3u, index1);
+    DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
+    DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+    ASSERT_EQ(Kind::kInRegister, location0.GetKind());
+    ASSERT_EQ(Kind::kInFpuRegister, location1.GetKind());
+    ASSERT_EQ(Kind::kInRegister, location0.GetInternalKind());
+    ASSERT_EQ(Kind::kInFpuRegister, location1.GetInternalKind());
     ASSERT_EQ(18, location0.GetValue());
     ASSERT_EQ(3, location1.GetValue());
 
@@ -180,8 +274,8 @@
   ArenaBitVector sp_mask(&arena, 0, false);
   uint32_t number_of_dex_registers = 2;
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kNone, 0);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kConstant, -2);
+  stream.AddDexRegisterEntry(0, Kind::kNone, 0);            // No location.
+  stream.AddDexRegisterEntry(1, Kind::kConstant, -2);       // Large location.
 
   size_t size = stream.ComputeNeededSize();
   void* memory = arena.Alloc(size, kArenaAllocMisc);
@@ -189,14 +283,62 @@
   stream.FillIn(region);
 
   CodeInfo code_info(region);
+  ASSERT_EQ(0u, code_info.GetStackMaskSize());
+  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
+
+  uint32_t number_of_location_catalog_entries =
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries();
+  ASSERT_EQ(1u, number_of_location_catalog_entries);
+  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
+  // The Dex register location catalog contains:
+  // - one 5-byte large Dex register location.
+  size_t expected_location_catalog_size = 5u;
+  ASSERT_EQ(expected_location_catalog_size, location_catalog.Size());
+
   StackMap stack_map = code_info.GetStackMapAt(0);
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+  ASSERT_EQ(0u, stack_map.GetDexPc(code_info));
+  ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info));
+  ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info));
+
   ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
-  DexRegisterMap dex_registers = code_info.GetDexRegisterMapOf(stack_map, 2);
-  ASSERT_EQ(DexRegisterLocation::Kind::kNone,
-            dex_registers.GetLocationKind(0, number_of_dex_registers));
-  ASSERT_EQ(DexRegisterLocation::Kind::kConstant,
-            dex_registers.GetLocationKind(1, number_of_dex_registers));
-  ASSERT_EQ(-2, dex_registers.GetConstant(1, number_of_dex_registers));
+  DexRegisterMap dex_register_map =
+      code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
+  ASSERT_FALSE(dex_register_map.IsDexRegisterLive(0));
+  ASSERT_TRUE(dex_register_map.IsDexRegisterLive(1));
+  ASSERT_EQ(1u, dex_register_map.GetNumberOfLiveDexRegisters(number_of_dex_registers));
+  // The Dex register map contains:
+  // - one 1-byte live bit mask.
+  // No space is allocated for the sole location catalog entry index, as it is useless.
+  size_t expected_dex_register_map_size = 1u + 0u;
+  ASSERT_EQ(expected_dex_register_map_size, dex_register_map.Size());
+
+  ASSERT_EQ(Kind::kNone,
+            dex_register_map.GetLocationKind(0, number_of_dex_registers, code_info));
+  ASSERT_EQ(Kind::kConstant,
+            dex_register_map.GetLocationKind(1, number_of_dex_registers, code_info));
+  ASSERT_EQ(Kind::kNone,
+            dex_register_map.GetLocationInternalKind(0, number_of_dex_registers, code_info));
+  ASSERT_EQ(Kind::kConstantLargeValue,
+            dex_register_map.GetLocationInternalKind(1, number_of_dex_registers, code_info));
+  ASSERT_EQ(-2, dex_register_map.GetConstant(1, number_of_dex_registers, code_info));
+
+  size_t index0 = dex_register_map.GetLocationCatalogEntryIndex(
+      0, number_of_dex_registers, number_of_location_catalog_entries);
+  size_t index1 = dex_register_map.GetLocationCatalogEntryIndex(
+      1, number_of_dex_registers, number_of_location_catalog_entries);
+  ASSERT_EQ(DexRegisterLocationCatalog::kNoLocationEntryIndex, index0);
+  ASSERT_EQ(0u, index1);
+  DexRegisterLocation location0 = location_catalog.GetDexRegisterLocation(index0);
+  DexRegisterLocation location1 = location_catalog.GetDexRegisterLocation(index1);
+  ASSERT_EQ(Kind::kNone, location0.GetKind());
+  ASSERT_EQ(Kind::kConstant, location1.GetKind());
+  ASSERT_EQ(Kind::kNone, location0.GetInternalKind());
+  ASSERT_EQ(Kind::kConstantLargeValue, location1.GetInternalKind());
+  ASSERT_EQ(0, location0.GetValue());
+  ASSERT_EQ(-2, location1.GetValue());
+
   ASSERT_FALSE(stack_map.HasInlineInfo(code_info));
 }
 
@@ -209,14 +351,21 @@
   StackMapStream stream(&arena);
 
   ArenaBitVector sp_mask(&arena, 0, false);
-  uint32_t number_of_dex_registers = 0xEA;
+  uint32_t number_of_dex_registers = 1024;
+  // Create the first stack map (and its Dex register map).
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  for (uint32_t i = 0; i < number_of_dex_registers - 9; ++i) {
-    stream.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, 0);
+  uint32_t number_of_dex_live_registers_in_dex_register_map_0 = number_of_dex_registers - 8;
+  for (uint32_t i = 0; i < number_of_dex_live_registers_in_dex_register_map_0; ++i) {
+    // Use two different Dex register locations to populate this map,
+    // as using a single value (in the whole CodeInfo object) would
+    // make this Dex register mapping data empty (see
+    // art::DexRegisterMap::SingleEntrySizeInBits).
+    stream.AddDexRegisterEntry(i, Kind::kConstant, i % 2);  // Short location.
   }
+  // Create the second stack map (and its Dex register map).
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
   for (uint32_t i = 0; i < number_of_dex_registers; ++i) {
-    stream.AddDexRegisterEntry(i, DexRegisterLocation::Kind::kConstant, 0);
+    stream.AddDexRegisterEntry(i, Kind::kConstant, 0);  // Short location.
   }
 
   size_t size = stream.ComputeNeededSize();
@@ -225,10 +374,35 @@
   stream.FillIn(region);
 
   CodeInfo code_info(region);
-  StackMap stack_map = code_info.GetStackMapAt(1);
-  ASSERT_TRUE(stack_map.HasDexRegisterMap(code_info));
-  ASSERT_NE(stack_map.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMap);
-  ASSERT_EQ(stack_map.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMapSmallEncoding);
+  // The location catalog contains two entries (DexRegisterLocation(kConstant, 0)
+  // and DexRegisterLocation(kConstant, 1)), therefore the location catalog index
+  // has a size of 1 bit.
+  uint32_t number_of_location_catalog_entries =
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries();
+  ASSERT_EQ(2u, number_of_location_catalog_entries);
+  ASSERT_EQ(1u, DexRegisterMap::SingleEntrySizeInBits(number_of_location_catalog_entries));
+
+  // The first Dex register map contains:
+  // - a live register bit mask for 1024 registers (that is, 128 bytes of
+  //   data); and
+  // - Dex register mapping information for 1016 1-bit Dex (live) register
+  //   locations (that is, 127 bytes of data).
+  // Hence it has a size of 255 bytes, and therefore...
+  ASSERT_EQ(128u, DexRegisterMap::GetLiveBitMaskSize(number_of_dex_registers));
+  StackMap stack_map0 = code_info.GetStackMapAt(0);
+  DexRegisterMap dex_register_map0 =
+      code_info.GetDexRegisterMapOf(stack_map0, number_of_dex_registers);
+  ASSERT_EQ(127u, dex_register_map0.GetLocationMappingDataSize(number_of_dex_registers,
+                                                               number_of_location_catalog_entries));
+  ASSERT_EQ(255u, dex_register_map0.Size());
+
+  StackMap stack_map1 = code_info.GetStackMapAt(1);
+  ASSERT_TRUE(stack_map1.HasDexRegisterMap(code_info));
+  // ...the offset of the second Dex register map (relative to the
+  // beginning of the Dex register maps region) is 255 (i.e.,
+  // kNoDexRegisterMapSmallEncoding).
+  ASSERT_NE(stack_map1.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMap);
+  ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(code_info), 0xFFu);
 }
 
 TEST(StackMapTest, TestShareDexRegisterMap) {
@@ -240,16 +414,16 @@
   uint32_t number_of_dex_registers = 2;
   // First stack map.
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kInRegister, 0);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kConstant, -2);
+  stream.AddDexRegisterEntry(0, Kind::kInRegister, 0);  // Short location.
+  stream.AddDexRegisterEntry(1, Kind::kConstant, -2);   // Large location.
   // Second stack map, which should share the same dex register map.
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kInRegister, 0);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kConstant, -2);
+  stream.AddDexRegisterEntry(0, Kind::kInRegister, 0);  // Short location.
+  stream.AddDexRegisterEntry(1, Kind::kConstant, -2);   // Large location.
   // Third stack map (doesn't share the dex register map).
   stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
-  stream.AddDexRegisterEntry(0, DexRegisterLocation::Kind::kInRegister, 2);
-  stream.AddDexRegisterEntry(1, DexRegisterLocation::Kind::kConstant, -2);
+  stream.AddDexRegisterEntry(0, Kind::kInRegister, 2);  // Short location.
+  stream.AddDexRegisterEntry(1, Kind::kConstant, -2);   // Large location.
 
   size_t size = stream.ComputeNeededSize();
   void* memory = arena.Alloc(size, kArenaAllocMisc);
@@ -260,20 +434,20 @@
   // Verify first stack map.
   StackMap sm0 = ci.GetStackMapAt(0);
   DexRegisterMap dex_registers0 = ci.GetDexRegisterMapOf(sm0, number_of_dex_registers);
-  ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers));
-  ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers));
+  ASSERT_EQ(0, dex_registers0.GetMachineRegister(0, number_of_dex_registers, ci));
+  ASSERT_EQ(-2, dex_registers0.GetConstant(1, number_of_dex_registers, ci));
 
   // Verify second stack map.
   StackMap sm1 = ci.GetStackMapAt(1);
   DexRegisterMap dex_registers1 = ci.GetDexRegisterMapOf(sm1, number_of_dex_registers);
-  ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers));
-  ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers));
+  ASSERT_EQ(0, dex_registers1.GetMachineRegister(0, number_of_dex_registers, ci));
+  ASSERT_EQ(-2, dex_registers1.GetConstant(1, number_of_dex_registers, ci));
 
   // Verify third stack map.
   StackMap sm2 = ci.GetStackMapAt(2);
   DexRegisterMap dex_registers2 = ci.GetDexRegisterMapOf(sm2, number_of_dex_registers);
-  ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers));
-  ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers));
+  ASSERT_EQ(2, dex_registers2.GetMachineRegister(0, number_of_dex_registers, ci));
+  ASSERT_EQ(-2, dex_registers2.GetConstant(1, number_of_dex_registers, ci));
 
   // Verify dex register map offsets.
   ASSERT_EQ(sm0.GetDexRegisterMapOffset(ci), sm1.GetDexRegisterMapOffset(ci));
@@ -281,4 +455,39 @@
   ASSERT_NE(sm1.GetDexRegisterMapOffset(ci), sm2.GetDexRegisterMapOffset(ci));
 }
 
+TEST(StackMapTest, TestNoDexRegisterMap) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  StackMapStream stream(&arena);
+
+  ArenaBitVector sp_mask(&arena, 0, false);
+  uint32_t number_of_dex_registers = 0;
+  stream.AddStackMapEntry(0, 64, 0x3, &sp_mask, number_of_dex_registers, 0);
+
+  size_t size = stream.ComputeNeededSize();
+  void* memory = arena.Alloc(size, kArenaAllocMisc);
+  MemoryRegion region(memory, size);
+  stream.FillIn(region);
+
+  CodeInfo code_info(region);
+  ASSERT_EQ(0u, code_info.GetStackMaskSize());
+  ASSERT_EQ(1u, code_info.GetNumberOfStackMaps());
+
+  uint32_t number_of_location_catalog_entries =
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries();
+  ASSERT_EQ(0u, number_of_location_catalog_entries);
+  DexRegisterLocationCatalog location_catalog = code_info.GetDexRegisterLocationCatalog();
+  ASSERT_EQ(0u, location_catalog.Size());
+
+  StackMap stack_map = code_info.GetStackMapAt(0);
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForDexPc(0)));
+  ASSERT_TRUE(stack_map.Equals(code_info.GetStackMapForNativePcOffset(64)));
+  ASSERT_EQ(0u, stack_map.GetDexPc(code_info));
+  ASSERT_EQ(64u, stack_map.GetNativePcOffset(code_info));
+  ASSERT_EQ(0x3u, stack_map.GetRegisterMask(code_info));
+
+  ASSERT_FALSE(stack_map.HasDexRegisterMap(code_info));
+  ASSERT_FALSE(stack_map.HasInlineInfo(code_info));
+}
+
 }  // namespace art
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 16f0e70..0c2250e 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -1094,7 +1094,7 @@
     lw      $a0, ARG_SLOT_SIZE($sp)       # load resolved method to $a0
     RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
     move    $t9, $v0               # code pointer must be in $t9 to generate the global pointer
-    jalr    $zero, $v0             # tail call to method
+    jalr    $zero, $t9             # tail call to method
     nop
 1:
     RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
@@ -1203,29 +1203,28 @@
     .cpload  $t9
     move     $ra, $zero     # link register is to here, so clobber with 0 for later checks
 
+    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
     addiu    $sp, $sp, -16  # allocate temp storage on the stack
     .cfi_adjust_cfa_offset 16
-    sw       $v0, 12($sp)
-    .cfi_rel_offset 2, 32
-    sw       $v1, 8($sp)
-    .cfi_rel_offset 3, 36
-    s.d      $f0, 0($sp)
-    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
+    sw       $v0, ARG_SLOT_SIZE+12($sp)
+    .cfi_rel_offset 2, ARG_SLOT_SIZE+12
+    sw       $v1, ARG_SLOT_SIZE+8($sp)
+    .cfi_rel_offset 3, ARG_SLOT_SIZE+8
+    s.d      $f0, ARG_SLOT_SIZE($sp)
     s.d      $f0, 16($sp)   # pass fpr result
     move     $a2, $v0       # pass gpr result
     move     $a3, $v1
-    addiu    $a1, $sp, ARG_SLOT_SIZE   # pass $sp (remove arg slots)
+    addiu    $a1, $sp, ARG_SLOT_SIZE+16   # pass $sp (remove arg slots and temp storage)
     jal      artInstrumentationMethodExitFromCode  # (Thread*, SP, gpr_res, fpr_res)
     move     $a0, rSELF     # pass Thread::Current
-    move     $t0, $v0       # set aside returned link register
+    move     $t9, $v0       # set aside returned link register
     move     $ra, $v1       # set link register for deoptimization
-    addiu    $sp, $sp, ARG_SLOT_SIZE+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE  # args slot + refs_only callee save frame
-    lw       $v0, 12($sp)   # restore return values
-    lw       $v1, 8($sp)
-    l.d      $f0, 0($sp)
-    jalr     $zero, $t0     # return
-    addiu    $sp, $sp, 16   # remove temp storage from stack
-    .cfi_adjust_cfa_offset -16
+    lw       $v0, ARG_SLOT_SIZE+12($sp)   # restore return values
+    lw       $v1, ARG_SLOT_SIZE+8($sp)
+    l.d      $f0, ARG_SLOT_SIZE($sp)
+    jalr     $zero, $t9     # return
+    addiu    $sp, $sp, ARG_SLOT_SIZE+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+16  # restore stack
+    .cfi_adjust_cfa_offset -(ARG_SLOT_SIZE+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE+16)
 END art_quick_instrumentation_exit
 
     /*
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index 8cb95f1..697bf00 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1504,11 +1504,11 @@
     move     $a1, $t0          # pass $sp
     jal      artInstrumentationMethodExitFromCode  # (Thread*, SP, gpr_res, fpr_res)
     move     $a0, rSELF        # pass Thread::Current
-    move     $t0, $v0          # set aside returned link register
+    move     $t9, $v0          # set aside returned link register
     move     $ra, $v1          # set link register for deoptimization
     ld       $v0, 0($sp)       # restore return values
     l.d      $f0, 8($sp)
-    jalr     $zero, $t0        # return
+    jalr     $zero, $t9        # return
     daddiu   $sp, $sp, 16+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE  # 16 bytes of saved values + ref_only callee save frame
     .cfi_adjust_cfa_offset -(16+FRAME_SIZE_REFS_ONLY_CALLEE_SAVE)
 END art_quick_instrumentation_exit
diff --git a/runtime/base/hash_map.h b/runtime/base/hash_map.h
index c0f903f..eab80ff 100644
--- a/runtime/base/hash_map.h
+++ b/runtime/base/hash_map.h
@@ -48,7 +48,7 @@
   Fn fn_;
 };
 
-template <class Key, class Value, class EmptyFn = DefaultEmptyFn<Key>,
+template <class Key, class Value, class EmptyFn,
     class HashFn = std::hash<Key>, class Pred = std::equal_to<Key>,
     class Alloc = std::allocator<std::pair<Key, Value>>>
 class HashMap : public HashSet<std::pair<Key, Value>, EmptyFn, HashMapWrapper<HashFn>,
diff --git a/runtime/check_reference_map_visitor.h b/runtime/check_reference_map_visitor.h
index 204546d..5d9cd35 100644
--- a/runtime/check_reference_map_visitor.h
+++ b/runtime/check_reference_map_visitor.h
@@ -75,7 +75,7 @@
       int reg = registers[i];
       CHECK(reg < m->GetCodeItem()->registers_size_);
       DexRegisterLocation location =
-          dex_register_map.GetLocationKindAndValue(reg, number_of_dex_registers);
+          dex_register_map.GetDexRegisterLocation(reg, number_of_dex_registers, code_info);
       switch (location.GetKind()) {
         case DexRegisterLocation::Kind::kNone:
           // Not set, should not be a reference.
diff --git a/runtime/common_runtime_test.cc b/runtime/common_runtime_test.cc
index 4104509..d400010 100644
--- a/runtime/common_runtime_test.cc
+++ b/runtime/common_runtime_test.cc
@@ -220,7 +220,6 @@
   std::string min_heap_string(StringPrintf("-Xms%zdm", gc::Heap::kDefaultInitialSize / MB));
   std::string max_heap_string(StringPrintf("-Xmx%zdm", gc::Heap::kDefaultMaximumSize / MB));
 
-  callbacks_.reset(new NoopCompilerCallbacks());
 
   RuntimeOptions options;
   std::string boot_class_path_string = "-Xbootclasspath:" + GetLibCoreDexFileName();
@@ -228,9 +227,16 @@
   options.push_back(std::make_pair("-Xcheck:jni", nullptr));
   options.push_back(std::make_pair(min_heap_string, nullptr));
   options.push_back(std::make_pair(max_heap_string, nullptr));
-  options.push_back(std::make_pair("compilercallbacks", callbacks_.get()));
+
+  callbacks_.reset(new NoopCompilerCallbacks());
+
   SetUpRuntimeOptions(&options);
 
+  // Install compiler-callbacks if SetUpRuntimeOptions hasn't deleted them.
+  if (callbacks_.get() != nullptr) {
+    options.push_back(std::make_pair("compilercallbacks", callbacks_.get()));
+  }
+
   PreRuntimeCreate();
   if (!Runtime::Create(options, false)) {
     LOG(FATAL) << "Failed to create runtime";
diff --git a/runtime/common_runtime_test.h b/runtime/common_runtime_test.h
index a29487f..5fbc2ee 100644
--- a/runtime/common_runtime_test.h
+++ b/runtime/common_runtime_test.h
@@ -140,10 +140,11 @@
   // Get the first dex file from a PathClassLoader. Will abort if it is null.
   const DexFile* GetFirstDexFile(jobject jclass_loader);
 
+  std::unique_ptr<CompilerCallbacks> callbacks_;
+
  private:
   static std::string GetCoreFileLocation(const char* suffix);
 
-  std::unique_ptr<CompilerCallbacks> callbacks_;
   std::vector<std::unique_ptr<const DexFile>> loaded_dex_files_;
 };
 
diff --git a/runtime/memory_region.h b/runtime/memory_region.h
index f867f6a..6a784eb 100644
--- a/runtime/memory_region.h
+++ b/runtime/memory_region.h
@@ -48,22 +48,28 @@
   uint8_t* end() const { return start() + size_; }
 
   // Load value of type `T` at `offset`.  The memory address corresponding
-  // to `offset` should be word-aligned.
-  template<typename T> T Load(uintptr_t offset) const {
-    // TODO: DCHECK that the address is word-aligned.
-    return *ComputeInternalPointer<T>(offset);
+  // to `offset` should be word-aligned (on ARM, this is a requirement).
+  template<typename T>
+  ALWAYS_INLINE T Load(uintptr_t offset) const {
+    T* address = ComputeInternalPointer<T>(offset);
+    DCHECK(IsWordAligned(address));
+    return *address;
   }
 
   // Store `value` (of type `T`) at `offset`.  The memory address
-  // corresponding to `offset` should be word-aligned.
-  template<typename T> void Store(uintptr_t offset, T value) const {
-    // TODO: DCHECK that the address is word-aligned.
-    *ComputeInternalPointer<T>(offset) = value;
+  // corresponding to `offset` should be word-aligned (on ARM, this is
+  // a requirement).
+  template<typename T>
+  ALWAYS_INLINE void Store(uintptr_t offset, T value) const {
+    T* address = ComputeInternalPointer<T>(offset);
+    DCHECK(IsWordAligned(address));
+    *address = value;
   }
 
   // Load value of type `T` at `offset`.  The memory address corresponding
   // to `offset` does not need to be word-aligned.
-  template<typename T> T LoadUnaligned(uintptr_t offset) const {
+  template<typename T>
+  ALWAYS_INLINE T LoadUnaligned(uintptr_t offset) const {
     // Equivalent unsigned integer type corresponding to T.
     typedef typename UnsignedIntegerType<sizeof(T)>::type U;
     U equivalent_unsigned_integer_value = 0;
@@ -77,7 +83,8 @@
 
   // Store `value` (of type `T`) at `offset`.  The memory address
   // corresponding to `offset` does not need to be word-aligned.
-  template<typename T> void StoreUnaligned(uintptr_t offset, T value) const {
+  template<typename T>
+  ALWAYS_INLINE void StoreUnaligned(uintptr_t offset, T value) const {
     // Equivalent unsigned integer type corresponding to T.
     typedef typename UnsignedIntegerType<sizeof(T)>::type U;
     U equivalent_unsigned_integer_value = bit_cast<U, T>(value);
@@ -88,19 +95,20 @@
     }
   }
 
-  template<typename T> T* PointerTo(uintptr_t offset) const {
+  template<typename T>
+  ALWAYS_INLINE T* PointerTo(uintptr_t offset) const {
     return ComputeInternalPointer<T>(offset);
   }
 
   // Load a single bit in the region. The bit at offset 0 is the least
   // significant bit in the first byte.
-  bool LoadBit(uintptr_t bit_offset) const {
+  ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
     uint8_t bit_mask;
     uint8_t byte = *ComputeBitPointer(bit_offset, &bit_mask);
     return byte & bit_mask;
   }
 
-  void StoreBit(uintptr_t bit_offset, bool value) const {
+  ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {
     uint8_t bit_mask;
     uint8_t* byte = ComputeBitPointer(bit_offset, &bit_mask);
     if (value) {
@@ -110,6 +118,31 @@
     }
   }
 
+  // Load `length` bits from the region starting at bit offset `bit_offset`.
+  // The bit at the smallest offset is the least significant bit in the
+  // loaded value.  `length` must not be larger than the number of bits
+  // contained in the return value (32).
+  uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {
+    CHECK_LE(length, sizeof(uint32_t) * kBitsPerByte);
+    uint32_t value = 0u;
+    for (size_t i = 0; i < length; ++i) {
+      value |= LoadBit(bit_offset + i) << i;
+    }
+    return value;
+  }
+
+  // Store `value` on `length` bits in the region starting at bit offset
+  // `bit_offset`.  The bit at the smallest offset is the least significant
+  // bit of the stored `value`.  `value` must not be larger than `length`
+  // bits.
+  void StoreBits(uintptr_t bit_offset, uint32_t value, size_t length) {
+    CHECK_LT(value, 2u << length);
+    for (size_t i = 0; i < length; ++i) {
+      bool ith_bit = value & (1 << i);
+      StoreBit(bit_offset + i, ith_bit);
+    }
+  }
+
   void CopyFrom(size_t offset, const MemoryRegion& from) const;
 
   // Compute a sub memory region based on an existing one.
@@ -126,7 +159,8 @@
   }
 
  private:
-  template<typename T> T* ComputeInternalPointer(size_t offset) const {
+  template<typename T>
+  ALWAYS_INLINE T* ComputeInternalPointer(size_t offset) const {
     CHECK_GE(size(), sizeof(T));
     CHECK_LE(offset, size() - sizeof(T));
     return reinterpret_cast<T*>(start() + offset);
@@ -134,13 +168,20 @@
 
   // Locate the bit with the given offset. Returns a pointer to the byte
   // containing the bit, and sets bit_mask to the bit within that byte.
-  uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const {
+  ALWAYS_INLINE uint8_t* ComputeBitPointer(uintptr_t bit_offset, uint8_t* bit_mask) const {
     uintptr_t bit_remainder = (bit_offset & (kBitsPerByte - 1));
     *bit_mask = (1U << bit_remainder);
     uintptr_t byte_offset = (bit_offset >> kBitsPerByteLog2);
     return ComputeInternalPointer<uint8_t>(byte_offset);
   }
 
+  // Is `address` aligned on a machine word?
+  template<typename T> static bool IsWordAligned(const T* address) {
+    // Word alignment in bytes.
+    size_t kWordAlignment = GetInstructionSetPointerSize(kRuntimeISA);
+    return IsAlignedParam(address, kWordAlignment);
+  }
+
   void* pointer_;
   size_t size_;
 };
diff --git a/runtime/oat_file_assistant_test.cc b/runtime/oat_file_assistant_test.cc
index b2798d3..a8b0876 100644
--- a/runtime/oat_file_assistant_test.cc
+++ b/runtime/oat_file_assistant_test.cc
@@ -27,6 +27,7 @@
 
 #include "class_linker.h"
 #include "common_runtime_test.h"
+#include "compiler_callbacks.h"
 #include "mem_map.h"
 #include "os.h"
 #include "thread-inl.h"
@@ -77,11 +78,7 @@
           nullptr));
     // Make sure compilercallbacks are not set so that relocation will be
     // enabled.
-    for (std::pair<std::string, const void*>& pair : *options) {
-      if (pair.first == "compilercallbacks") {
-        pair.second = nullptr;
-      }
-    }
+    callbacks_.reset();
   }
 
   virtual void PreRuntimeCreate() {
@@ -213,29 +210,29 @@
         // image in case of the GSS collector.
         + 384 * MB;
 
-    std::string error_msg;
     std::unique_ptr<BacktraceMap> map(BacktraceMap::Create(getpid(), true));
     ASSERT_TRUE(map.get() != nullptr) << "Failed to build process map";
     for (BacktraceMap::const_iterator it = map->begin();
         reservation_start < reservation_end && it != map->end(); ++it) {
-      if (it->end <= reservation_start) {
-        continue;
-      }
+      ReserveImageSpaceChunk(reservation_start, std::min(it->start, reservation_end));
+      reservation_start = std::max(reservation_start, it->end);
+    }
+    ReserveImageSpaceChunk(reservation_start, reservation_end);
+  }
 
-      if (it->start < reservation_start) {
-        reservation_start = std::min(reservation_end, it->end);
-      }
-
+  // Reserve a chunk of memory for the image space in the given range.
+  // Only has effect for chunks with a positive number of bytes.
+  void ReserveImageSpaceChunk(uintptr_t start, uintptr_t end) {
+    if (start < end) {
+      std::string error_msg;
       image_reservation_.push_back(std::unique_ptr<MemMap>(
           MemMap::MapAnonymous("image reservation",
-              reinterpret_cast<uint8_t*>(reservation_start),
-              std::min(it->start, reservation_end) - reservation_start,
+              reinterpret_cast<uint8_t*>(start), end - start,
               PROT_NONE, false, false, &error_msg)));
       ASSERT_TRUE(image_reservation_.back().get() != nullptr) << error_msg;
       LOG(INFO) << "Reserved space for image " <<
         reinterpret_cast<void*>(image_reservation_.back()->Begin()) << "-" <<
         reinterpret_cast<void*>(image_reservation_.back()->End());
-      reservation_start = it->end;
     }
   }
 
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 2d688ee..4ae49dd 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -206,21 +206,22 @@
   DexRegisterMap dex_register_map =
       code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
   DexRegisterLocation::Kind location_kind =
-      dex_register_map.GetLocationKind(vreg, number_of_dex_registers);
+      dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info);
   switch (location_kind) {
     case DexRegisterLocation::Kind::kInStack: {
-      const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers);
+      const int32_t offset =
+          dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers, code_info);
       const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
       *val = *reinterpret_cast<const uint32_t*>(addr);
       return true;
     }
     case DexRegisterLocation::Kind::kInRegister:
     case DexRegisterLocation::Kind::kInFpuRegister: {
-      uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers);
+      uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info);
       return GetRegisterIfAccessible(reg, kind, val);
     }
     case DexRegisterLocation::Kind::kConstant:
-      *val = dex_register_map.GetConstant(vreg, number_of_dex_registers);
+      *val = dex_register_map.GetConstant(vreg, number_of_dex_registers, code_info);
       return true;
     case DexRegisterLocation::Kind::kNone:
       return false;
@@ -228,7 +229,7 @@
       LOG(FATAL)
           << "Unexpected location kind"
           << DexRegisterLocation::PrettyDescriptor(
-                dex_register_map.GetLocationInternalKind(vreg, number_of_dex_registers));
+                dex_register_map.GetLocationInternalKind(vreg, number_of_dex_registers, code_info));
       UNREACHABLE();
   }
 }
@@ -396,18 +397,19 @@
   DexRegisterMap dex_register_map =
       code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);
   DexRegisterLocation::Kind location_kind =
-      dex_register_map.GetLocationKind(vreg, number_of_dex_registers);
+      dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info);
   uint32_t dex_pc = m->ToDexPc(cur_quick_frame_pc_, false);
   switch (location_kind) {
     case DexRegisterLocation::Kind::kInStack: {
-      const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers);
+      const int32_t offset =
+          dex_register_map.GetStackOffsetInBytes(vreg, number_of_dex_registers, code_info);
       uint8_t* addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + offset;
       *reinterpret_cast<uint32_t*>(addr) = new_value;
       return true;
     }
     case DexRegisterLocation::Kind::kInRegister:
     case DexRegisterLocation::Kind::kInFpuRegister: {
-      uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers);
+      uint32_t reg = dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info);
       return SetRegisterIfAccessible(reg, new_value, kind);
     }
     case DexRegisterLocation::Kind::kConstant:
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 020a6e6..11e7e44 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -16,133 +16,198 @@
 
 #include "stack_map.h"
 
+#include <stdint.h>
+
 namespace art {
 
-constexpr uint32_t StackMap::kNoDexRegisterMapSmallEncoding;
-constexpr uint32_t StackMap::kNoInlineInfoSmallEncoding;
+constexpr size_t DexRegisterLocationCatalog::kNoLocationEntryIndex;
 constexpr uint32_t StackMap::kNoDexRegisterMap;
 constexpr uint32_t StackMap::kNoInlineInfo;
 
-uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
-  return info.HasSmallDexPc()
-      ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset())
-      : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset());
+DexRegisterLocation::Kind DexRegisterMap::GetLocationInternalKind(uint16_t dex_register_number,
+                                                                  uint16_t number_of_dex_registers,
+                                                                  const CodeInfo& code_info) const {
+  DexRegisterLocationCatalog dex_register_location_catalog =
+      code_info.GetDexRegisterLocationCatalog();
+  size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
+      dex_register_number,
+      number_of_dex_registers,
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries());
+  return dex_register_location_catalog.GetLocationInternalKind(location_catalog_entry_index);
 }
 
-void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
-  DCHECK(!info.HasSmallDexPc() || IsUint<kBitsForSmallEncoding>(dex_pc)) << dex_pc;
-  info.HasSmallDexPc()
-      ? region_.StoreUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc)
-      : region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc);
+DexRegisterLocation DexRegisterMap::GetDexRegisterLocation(uint16_t dex_register_number,
+                                                           uint16_t number_of_dex_registers,
+                                                           const CodeInfo& code_info) const {
+  DexRegisterLocationCatalog dex_register_location_catalog =
+      code_info.GetDexRegisterLocationCatalog();
+  size_t location_catalog_entry_index = GetLocationCatalogEntryIndex(
+      dex_register_number,
+      number_of_dex_registers,
+      code_info.GetNumberOfDexRegisterLocationCatalogEntries());
+  return dex_register_location_catalog.GetDexRegisterLocation(location_catalog_entry_index);
 }
 
-uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
-  return info.HasSmallNativePc()
-      ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapNativePcOffset())
-      : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapNativePcOffset());
-}
-
-void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
-  DCHECK(!info.HasSmallNativePc()
-         || IsUint<kBitsForSmallEncoding>(native_pc_offset)) << native_pc_offset;
-  uint32_t entry = info.ComputeStackMapNativePcOffset();
-  info.HasSmallNativePc()
-      ? region_.StoreUnaligned<kSmallEncoding>(entry, native_pc_offset)
-      : region_.StoreUnaligned<kLargeEncoding>(entry, native_pc_offset);
-}
-
-uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
-  if (info.HasSmallDexRegisterMap()) {
-    uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
-        info.ComputeStackMapDexRegisterMapOffset());
-    if (value == kNoDexRegisterMapSmallEncoding) {
-      return kNoDexRegisterMap;
+// Loads `number_of_bytes` at the given `offset` and assembles a uint32_t. If `check_max` is true,
+// this method converts the maximum value encodable in `number_of_bytes` (all bits set) into a uint32_t 0xFFFFFFFF.
+static uint32_t LoadAt(MemoryRegion region,
+                       size_t number_of_bytes,
+                       size_t offset,
+                       bool check_max = false) {
+  if (number_of_bytes == 0u) {
+    DCHECK(!check_max);
+    return 0;
+  } else if (number_of_bytes == 1u) {
+    uint8_t value = region.LoadUnaligned<uint8_t>(offset);
+    if (check_max && value == 0xFF) {
+      return -1;
+    } else {
+      return value;
+    }
+  } else if (number_of_bytes == 2u) {
+    uint16_t value = region.LoadUnaligned<uint16_t>(offset);
+    if (check_max && value == 0xFFFF) {
+      return -1;
+    } else {
+      return value;
+    }
+  } else if (number_of_bytes == 3u) {
+    uint16_t low = region.LoadUnaligned<uint16_t>(offset);
+    uint16_t high = region.LoadUnaligned<uint8_t>(offset + sizeof(uint16_t));
+    uint32_t value = (high << 16) + low;
+    if (check_max && value == 0xFFFFFF) {
+      return -1;
     } else {
       return value;
     }
   } else {
-    return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexRegisterMapOffset());
+    DCHECK_EQ(number_of_bytes, 4u);
+    return region.LoadUnaligned<uint32_t>(offset);
   }
 }
 
+static void StoreAt(MemoryRegion region, size_t number_of_bytes, size_t offset, uint32_t value) {
+  if (number_of_bytes == 0u) {
+    DCHECK_EQ(value, 0u);
+  } else if (number_of_bytes == 1u) {
+    region.StoreUnaligned<uint8_t>(offset, value);
+  } else if (number_of_bytes == 2u) {
+    region.StoreUnaligned<uint16_t>(offset, value);
+  } else if (number_of_bytes == 3u) {
+    region.StoreUnaligned<uint16_t>(offset, Low16Bits(value));
+    region.StoreUnaligned<uint8_t>(offset + sizeof(uint16_t), High16Bits(value));
+  } else {
+    region.StoreUnaligned<uint32_t>(offset, value);
+    DCHECK_EQ(number_of_bytes, 4u);
+  }
+}
+
+uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
+  return LoadAt(region_, info.NumberOfBytesForDexPc(), info.ComputeStackMapDexPcOffset());
+}
+
+void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
+  StoreAt(region_, info.NumberOfBytesForDexPc(), info.ComputeStackMapDexPcOffset(), dex_pc);
+}
+
+uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
+  return LoadAt(region_, info.NumberOfBytesForNativePc(), info.ComputeStackMapNativePcOffset());
+}
+
+void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
+  StoreAt(region_, info.NumberOfBytesForNativePc(), info.ComputeStackMapNativePcOffset(), native_pc_offset);
+}
+
+uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
+  return LoadAt(region_,
+                info.NumberOfBytesForDexRegisterMap(),
+                info.ComputeStackMapDexRegisterMapOffset(),
+                /* check_max */ true);
+}
+
 void StackMap::SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset) {
-  DCHECK(!info.HasSmallDexRegisterMap()
-         || (IsUint<kBitsForSmallEncoding>(offset)
-             || (offset == kNoDexRegisterMap))) << offset;
-  size_t dex_register_map_entry = info.ComputeStackMapDexRegisterMapOffset();
-  info.HasSmallDexRegisterMap()
-      ? region_.StoreUnaligned<kSmallEncoding>(dex_register_map_entry, offset)
-      : region_.StoreUnaligned<kLargeEncoding>(dex_register_map_entry, offset);
+  StoreAt(region_,
+          info.NumberOfBytesForDexRegisterMap(),
+          info.ComputeStackMapDexRegisterMapOffset(),
+          offset);
 }
 
 uint32_t StackMap::GetInlineDescriptorOffset(const CodeInfo& info) const {
   if (!info.HasInlineInfo()) return kNoInlineInfo;
-  if (info.HasSmallInlineInfo()) {
-    uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
-        info.ComputeStackMapInlineInfoOffset());
-    if (value == kNoInlineInfoSmallEncoding) {
-      return kNoInlineInfo;
-    } else {
-      return value;
-    }
-  } else {
-    return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapInlineInfoOffset());
-  }
+  return LoadAt(region_,
+                info.NumberOfBytesForInlineInfo(),
+                info.ComputeStackMapInlineInfoOffset(),
+                /* check_max */ true);
 }
 
 void StackMap::SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset) {
   DCHECK(info.HasInlineInfo());
-  DCHECK(!info.HasSmallInlineInfo()
-         || (IsUint<kBitsForSmallEncoding>(offset)
-             || (offset == kNoInlineInfo))) << offset;
-  size_t inline_entry = info.ComputeStackMapInlineInfoOffset();
-  info.HasSmallInlineInfo()
-      ? region_.StoreUnaligned<kSmallEncoding>(inline_entry, offset)
-      : region_.StoreUnaligned<kLargeEncoding>(inline_entry, offset);
+  StoreAt(region_,
+          info.NumberOfBytesForInlineInfo(),
+          info.ComputeStackMapInlineInfoOffset(),
+          offset);
 }
 
 uint32_t StackMap::GetRegisterMask(const CodeInfo& info) const {
-  return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset());
+  return LoadAt(region_,
+                info.NumberOfBytesForRegisterMask(),
+                info.ComputeStackMapRegisterMaskOffset());
 }
 
 void StackMap::SetRegisterMask(const CodeInfo& info, uint32_t mask) {
-  region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset(), mask);
+  StoreAt(region_,
+          info.NumberOfBytesForRegisterMask(),
+          info.ComputeStackMapRegisterMaskOffset(),
+          mask);
 }
 
-size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
-                                     bool has_inline_info,
-                                     bool is_small_inline_info,
-                                     bool is_small_dex_map,
-                                     bool is_small_dex_pc,
-                                     bool is_small_native_pc) {
-  return StackMap::kFixedSize
-      + stack_mask_size
-      + (has_inline_info ? NumberOfBytesForEntry(is_small_inline_info) : 0)
-      + NumberOfBytesForEntry(is_small_dex_map)
-      + NumberOfBytesForEntry(is_small_dex_pc)
-      + NumberOfBytesForEntry(is_small_native_pc);
+size_t StackMap::ComputeStackMapSizeInternal(size_t stack_mask_size,
+                                             size_t number_of_bytes_for_inline_info,
+                                             size_t number_of_bytes_for_dex_map,
+                                             size_t number_of_bytes_for_dex_pc,
+                                             size_t number_of_bytes_for_native_pc,
+                                             size_t number_of_bytes_for_register_mask) {
+  return stack_mask_size
+      + number_of_bytes_for_inline_info
+      + number_of_bytes_for_dex_map
+      + number_of_bytes_for_dex_pc
+      + number_of_bytes_for_native_pc
+      + number_of_bytes_for_register_mask;
 }
 
 size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
                                      size_t inline_info_size,
                                      size_t dex_register_map_size,
                                      size_t dex_pc_max,
-                                     size_t native_pc_max) {
-  return ComputeStackMapSize(
+                                     size_t native_pc_max,
+                                     size_t register_mask_max) {
+  return ComputeStackMapSizeInternal(
       stack_mask_size,
-      inline_info_size != 0,
-      // + 1 to also encode kNoInlineInfo.
-      IsUint<kBitsForSmallEncoding>(inline_info_size + dex_register_map_size + 1),
+      inline_info_size == 0
+          ? 0
+            // + 1 to also encode kNoInlineInfo.
+          :  CodeInfo::EncodingSizeInBytes(inline_info_size + dex_register_map_size + 1),
       // + 1 to also encode kNoDexRegisterMap.
-      IsUint<kBitsForSmallEncoding>(dex_register_map_size + 1),
-      IsUint<kBitsForSmallEncoding>(dex_pc_max),
-      IsUint<kBitsForSmallEncoding>(native_pc_max));
+      CodeInfo::EncodingSizeInBytes(dex_register_map_size + 1),
+      CodeInfo::EncodingSizeInBytes(dex_pc_max),
+      CodeInfo::EncodingSizeInBytes(native_pc_max),
+      CodeInfo::EncodingSizeInBytes(register_mask_max));
 }
 
 MemoryRegion StackMap::GetStackMask(const CodeInfo& info) const {
   return region_.Subregion(info.ComputeStackMapStackMaskOffset(), info.GetStackMaskSize());
 }
 
+static void DumpRegisterMapping(std::ostream& os,
+                                size_t dex_register_num,
+                                DexRegisterLocation location,
+                                const std::string& prefix = "v",
+                                const std::string& suffix = "") {
+  os << "      " << prefix << dex_register_num << ": "
+     << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind())
+     << " (" << location.GetValue() << ")" << suffix << '\n';
+}
+
 void CodeInfo::DumpStackMapHeader(std::ostream& os, size_t stack_map_num) const {
   StackMap stack_map = GetStackMapAt(stack_map_num);
   os << "    StackMap " << stack_map_num
@@ -168,13 +233,25 @@
      << ", number_of_dex_registers=" << number_of_dex_registers
      << ", number_of_stack_maps=" << number_of_stack_maps
      << ", has_inline_info=" << HasInlineInfo()
-     << ", has_small_inline_info=" << HasSmallInlineInfo()
-     << ", has_small_dex_register_map=" << HasSmallDexRegisterMap()
-     << ", has_small_dex_pc=" << HasSmallDexPc()
-     << ", has_small_native_pc=" << HasSmallNativePc()
+     << ", number_of_bytes_for_inline_info=" << NumberOfBytesForInlineInfo()
+     << ", number_of_bytes_for_dex_register_map=" << NumberOfBytesForDexRegisterMap()
+     << ", number_of_bytes_for_dex_pc=" << NumberOfBytesForDexPc()
+     << ", number_of_bytes_for_native_pc=" << NumberOfBytesForNativePc()
+     << ", number_of_bytes_for_register_mask=" << NumberOfBytesForRegisterMask()
      << ")\n";
 
-  // Display stack maps along with Dex register maps.
+  // Display the Dex register location catalog.
+  size_t number_of_location_catalog_entries = GetNumberOfDexRegisterLocationCatalogEntries();
+  size_t location_catalog_size_in_bytes = GetDexRegisterLocationCatalogSize();
+  os << "  DexRegisterLocationCatalog (number_of_entries=" << number_of_location_catalog_entries
+     << ", size_in_bytes=" << location_catalog_size_in_bytes << ")\n";
+  DexRegisterLocationCatalog dex_register_location_catalog = GetDexRegisterLocationCatalog();
+  for (size_t i = 0; i < number_of_location_catalog_entries; ++i) {
+    DexRegisterLocation location = dex_register_location_catalog.GetDexRegisterLocation(i);
+    DumpRegisterMapping(os, i, location, "entry ");
+  }
+
+  // Display stack maps along with (live) Dex register maps.
   for (size_t i = 0; i < number_of_stack_maps; ++i) {
     StackMap stack_map = GetStackMapAt(i);
     DumpStackMapHeader(os, i);
@@ -183,11 +260,13 @@
       // TODO: Display the bit mask of live Dex registers.
       for (size_t j = 0; j < number_of_dex_registers; ++j) {
         if (dex_register_map.IsDexRegisterLive(j)) {
+          size_t location_catalog_entry_index = dex_register_map.GetLocationCatalogEntryIndex(
+              j, number_of_dex_registers, number_of_location_catalog_entries);
           DexRegisterLocation location =
-              dex_register_map.GetLocationKindAndValue(j, number_of_dex_registers);
-           os << "      " << "v" << j << ": "
-              << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind())
-              << " (" << location.GetValue() << ")" << '\n';
+              dex_register_map.GetDexRegisterLocation(j, number_of_dex_registers, *this);
+          DumpRegisterMapping(
+              os, j, location, "v",
+              "\t[entry " + std::to_string(static_cast<int>(location_catalog_entry_index)) + "]");
         }
       }
     }
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 6ec7cc8..f68cafe 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -26,13 +26,10 @@
 // Size of a frame slot, in bytes.  This constant is a signed value,
 // to please the compiler in arithmetic operations involving int32_t
 // (signed) values.
-static ssize_t constexpr kFrameSlotSize = 4;
-
-// Word alignment required on ARM, in bytes.
-static constexpr size_t kWordAlignment = 4;
+static constexpr ssize_t kFrameSlotSize = 4;
 
 // Size of Dex virtual registers.
-static size_t constexpr kVRegSize = 4;
+static constexpr size_t kVRegSize = 4;
 
 class CodeInfo;
 
@@ -97,9 +94,9 @@
    *
    * In addition, DexRegisterMap also uses these values:
    * - kInStackLargeOffset: value holds a "large" stack offset (greater than
-   *   128 bytes);
-   * - kConstantLargeValue: value holds a "large" constant (lower than or
-   *   equal to -16, or greater than 16).
+   *   or equal to 128 bytes);
+   * - kConstantLargeValue: value holds a "large" constant (lower than 0,
+   *   or greater than or equal to 32).
    */
   enum class Kind : uint8_t {
     // Short location kinds, for entries fitting on one byte (3 bits
@@ -120,8 +117,7 @@
     kInStackLargeOffset = 5,  // 0b101
 
     // Large constant, that cannot fit on a 5-bit signed integer (i.e.,
-    // lower than -2^(5-1) = -16, or greater than or equal to
-    // 2^(5-1) - 1 = 15).
+    // lower than 0, or greater than or equal to 2^5 = 32).
     kConstantLargeValue = 6,  // 0b110
 
     kLastLocationKind = kConstantLargeValue
@@ -193,8 +189,10 @@
     }
   }
 
-  DexRegisterLocation(Kind kind, int32_t value)
-      : kind_(kind), value_(value) {}
+  // Required by art::StackMapStream::LocationCatalogEntriesIndices.
+  DexRegisterLocation() : kind_(Kind::kNone), value_(0) {}
+
+  DexRegisterLocation(Kind kind, int32_t value) : kind_(kind), value_(value) {}
 
   static DexRegisterLocation None() {
     return DexRegisterLocation(Kind::kNone, 0);
@@ -223,33 +221,23 @@
  private:
   Kind kind_;
   int32_t value_;
+
+  friend class DexRegisterLocationHashFn;
 };
 
 /**
- * Information on dex register values for a specific PC. The information is
- * of the form:
- * [live_bit_mask, DexRegisterLocation+].
+ * Store information on unique Dex register locations used in a method.
+ * The information is of the form:
+ * [DexRegisterLocation+].
  * DexRegisterLocations are either 1- or 5-byte wide (see art::DexRegisterLocation::Kind).
  */
-class DexRegisterMap {
+class DexRegisterLocationCatalog {
  public:
-  explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
+  explicit DexRegisterLocationCatalog(MemoryRegion region) : region_(region) {}
 
   // Short (compressed) location, fitting on one byte.
   typedef uint8_t ShortLocation;
 
-  static size_t LiveBitMaskSize(uint16_t number_of_dex_registers) {
-    return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
-  }
-
-  void SetLiveBitMask(size_t offset,
-                      uint16_t number_of_dex_registers,
-                      const BitVector& live_dex_registers_mask) {
-    for (uint16_t i = 0; i < number_of_dex_registers; i++) {
-      region_.StoreBit(offset + i, live_dex_registers_mask.IsBitSet(i));
-    }
-  }
-
   void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
     DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
     int32_t value = dex_register_location.GetValue();
@@ -265,12 +253,12 @@
         DCHECK_EQ(value % kFrameSlotSize, 0);
         value /= kFrameSlotSize;
       }
-      DCHECK(IsUint<kValueBits>(value)) << value;
+      DCHECK(IsShortValue(value)) << value;
       region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
     } else {
       // Large location.  Write the location on one byte and the value
       // on 4 bytes.
-      DCHECK(!IsUint<kValueBits>(value)) << value;
+      DCHECK(!IsShortValue(value)) << value;
       if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
         // Also divide large stack offsets by 4 for the sake of consistency.
         DCHECK_EQ(value % kFrameSlotSize, 0);
@@ -285,63 +273,39 @@
     }
   }
 
-  bool IsDexRegisterLive(uint16_t dex_register_index) const {
+  // Find the offset of the location catalog entry number `location_catalog_entry_index`.
+  size_t FindLocationOffset(size_t location_catalog_entry_index) const {
     size_t offset = kFixedSize;
-    return region_.LoadBit(offset + dex_register_index);
-  }
-
-  static constexpr size_t kNoDexRegisterLocationOffset = -1;
-
-  static size_t GetDexRegisterMapLocationsOffset(uint16_t number_of_dex_registers) {
-    return kLiveBitMaskOffset + LiveBitMaskSize(number_of_dex_registers);
-  }
-
-  // Find the offset of the Dex register location number `dex_register_index`.
-  size_t FindLocationOffset(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
-    if (!IsDexRegisterLive(dex_register_index)) return kNoDexRegisterLocationOffset;
-    size_t offset = GetDexRegisterMapLocationsOffset(number_of_dex_registers);
-    // Skip the first `dex_register_index - 1` entries.
-    for (uint16_t i = 0; i < dex_register_index; ++i) {
-      if (IsDexRegisterLive(i)) {
-        // Read the first next byte and inspect its first 3 bits to decide
-        // whether it is a short or a large location.
-        DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
-        if (DexRegisterLocation::IsShortLocationKind(kind)) {
-          // Short location.  Skip the current byte.
-          offset += SingleShortEntrySize();
-        } else {
-          // Large location.  Skip the 5 next bytes.
-          offset += SingleLargeEntrySize();
-        }
+    // Skip the first `location_catalog_entry_index` entries.
+    for (uint16_t i = 0; i < location_catalog_entry_index; ++i) {
+      // Read the first next byte and inspect its first 3 bits to decide
+      // whether it is a short or a large location.
+      DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
+      if (DexRegisterLocation::IsShortLocationKind(kind)) {
+        // Short location.  Skip the current byte.
+        offset += SingleShortEntrySize();
+      } else {
+        // Large location.  Skip the 5 next bytes.
+        offset += SingleLargeEntrySize();
       }
     }
     return offset;
   }
 
-  // Get the surface kind.
-  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_index,
-                                            uint16_t number_of_dex_registers) const {
-    return IsDexRegisterLive(dex_register_index)
-        ? DexRegisterLocation::ConvertToSurfaceKind(
-              GetLocationInternalKind(dex_register_index, number_of_dex_registers))
-        : DexRegisterLocation::Kind::kNone;
+  // Get the internal kind of entry at `location_catalog_entry_index`.
+  DexRegisterLocation::Kind GetLocationInternalKind(size_t location_catalog_entry_index) const {
+    if (location_catalog_entry_index == kNoLocationEntryIndex) {
+      return DexRegisterLocation::Kind::kNone;
+    }
+    return ExtractKindAtOffset(FindLocationOffset(location_catalog_entry_index));
   }
 
-  // Get the internal kind.
-  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_index,
-                                                    uint16_t number_of_dex_registers) const {
-    return IsDexRegisterLive(dex_register_index)
-        ? ExtractKindAtOffset(FindLocationOffset(dex_register_index, number_of_dex_registers))
-        : DexRegisterLocation::Kind::kNone;
-  }
-
-  // TODO: Rename as GetDexRegisterLocation?
-  DexRegisterLocation GetLocationKindAndValue(uint16_t dex_register_index,
-                                              uint16_t number_of_dex_registers) const {
-    if (!IsDexRegisterLive(dex_register_index)) {
+  // Get the (surface) kind and value of entry at `location_catalog_entry_index`.
+  DexRegisterLocation GetDexRegisterLocation(size_t location_catalog_entry_index) const {
+    if (location_catalog_entry_index == kNoLocationEntryIndex) {
       return DexRegisterLocation::None();
     }
-    size_t offset = FindLocationOffset(dex_register_index, number_of_dex_registers);
+    size_t offset = FindLocationOffset(location_catalog_entry_index);
     // Read the first byte and inspect its first 3 bits to get the location.
     ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
     DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
@@ -364,31 +328,6 @@
     }
   }
 
-  int32_t GetStackOffsetInBytes(uint16_t dex_register_index,
-                                uint16_t number_of_dex_registers) const {
-    DexRegisterLocation location =
-        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
-    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
-    // GetLocationKindAndValue returns the offset in bytes.
-    return location.GetValue();
-  }
-
-  int32_t GetConstant(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
-    DexRegisterLocation location =
-        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
-    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
-    return location.GetValue();
-  }
-
-  int32_t GetMachineRegister(uint16_t dex_register_index, uint16_t number_of_dex_registers) const {
-    DexRegisterLocation location =
-        GetLocationKindAndValue(dex_register_index, number_of_dex_registers);
-    DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
-           || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
-        << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
-    return location.GetValue();
-  }
-
   // Compute the compressed kind of `location`.
   static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
     switch (location.GetInternalKind()) {
@@ -398,22 +337,21 @@
 
       case DexRegisterLocation::Kind::kInRegister:
         DCHECK_GE(location.GetValue(), 0);
-        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
+        DCHECK_LT(location.GetValue(), 1 << kValueBits);
         return DexRegisterLocation::Kind::kInRegister;
 
       case DexRegisterLocation::Kind::kInFpuRegister:
         DCHECK_GE(location.GetValue(), 0);
-        DCHECK_LT(location.GetValue(), 1 << DexRegisterMap::kValueBits);
+        DCHECK_LT(location.GetValue(), 1 << kValueBits);
         return DexRegisterLocation::Kind::kInFpuRegister;
 
       case DexRegisterLocation::Kind::kInStack:
-        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
-        return IsUint<DexRegisterMap::kValueBits>(location.GetValue() / kFrameSlotSize)
+        return IsShortStackOffsetValue(location.GetValue())
             ? DexRegisterLocation::Kind::kInStack
             : DexRegisterLocation::Kind::kInStackLargeOffset;
 
       case DexRegisterLocation::Kind::kConstant:
-        return IsUint<DexRegisterMap::kValueBits>(location.GetValue())
+        return IsShortConstantValue(location.GetValue())
             ? DexRegisterLocation::Kind::kConstant
             : DexRegisterLocation::Kind::kConstantLargeValue;
 
@@ -433,11 +371,10 @@
         return true;
 
       case DexRegisterLocation::Kind::kInStack:
-        DCHECK_EQ(location.GetValue() % kFrameSlotSize, 0);
-        return IsUint<kValueBits>(location.GetValue() / kFrameSlotSize);
+        return IsShortStackOffsetValue(location.GetValue());
 
       case DexRegisterLocation::Kind::kConstant:
-        return IsUint<kValueBits>(location.GetValue());
+        return IsShortConstantValue(location.GetValue());
 
       default:
         UNREACHABLE();
@@ -445,9 +382,7 @@
   }
 
   static size_t EntrySize(const DexRegisterLocation& location) {
-    return CanBeEncodedAsShortLocation(location)
-        ? DexRegisterMap::SingleShortEntrySize()
-        : DexRegisterMap::SingleLargeEntrySize();
+    return CanBeEncodedAsShortLocation(location) ? SingleShortEntrySize() : SingleLargeEntrySize();
   }
 
   static size_t SingleShortEntrySize() {
@@ -462,10 +397,14 @@
     return region_.size();
   }
 
-  static constexpr int kLiveBitMaskOffset = 0;
-  static constexpr int kFixedSize = kLiveBitMaskOffset;
+  // Special (invalid) Dex register location catalog entry index meaning
+  // that there is no location for a given Dex register (i.e., it is
+  // mapped to a DexRegisterLocation::Kind::kNone location).
+  static constexpr size_t kNoLocationEntryIndex = -1;
 
  private:
+  static constexpr int kFixedSize = 0;
+
   // Width of the kind "field" in a short location, in bits.
   static constexpr size_t kKindBits = 3;
   // Width of the value "field" in a short location, in bits.
@@ -476,10 +415,24 @@
   static constexpr size_t kKindOffset = 0;
   static constexpr size_t kValueOffset = kKindBits;
 
+  static bool IsShortStackOffsetValue(int32_t value) {
+    DCHECK_EQ(value % kFrameSlotSize, 0);
+    return IsShortValue(value / kFrameSlotSize);
+  }
+
+  static bool IsShortConstantValue(int32_t value) {
+    return IsShortValue(value);
+  }
+
+  static bool IsShortValue(int32_t value) {
+    return IsUint<kValueBits>(value);
+  }
+
   static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
-    DCHECK(IsUint<kKindBits>(static_cast<uint8_t>(kind))) << static_cast<uint8_t>(kind);
-    DCHECK(IsUint<kValueBits>(value)) << value;
-    return (static_cast<uint8_t>(kind) & kKindMask) << kKindOffset
+    uint8_t kind_integer_value = static_cast<uint8_t>(kind);
+    DCHECK(IsUint<kKindBits>(kind_integer_value)) << kind_integer_value;
+    DCHECK(IsShortValue(value)) << value;
+    return (kind_integer_value & kKindMask) << kKindOffset
         | (value & kValueMask) << kValueOffset;
   }
 
@@ -507,6 +460,210 @@
   friend class StackMapStream;
 };
 
+/* Information on Dex register locations for a specific PC, mapping a
+ * stack map's Dex register to a location entry in a DexRegisterLocationCatalog.
+ * The information is of the form:
+ * [live_bit_mask, entries*]
+ * where entries are concatenated unsigned integer values encoded on a number
+ * of bits (fixed for all DexRegisterMap instances of a CodeInfo object) depending
+ * on the number of entries in the Dex register location catalog
+ * (see DexRegisterMap::SingleEntrySizeInBits).  The map is 1-byte aligned.
+ */
+class DexRegisterMap {
+ public:
+  explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
+
+  // Get the surface kind of Dex register `dex_register_number`.
+  DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number,
+                                            uint16_t number_of_dex_registers,
+                                            const CodeInfo& code_info) const {
+    return DexRegisterLocation::ConvertToSurfaceKind(
+        GetLocationInternalKind(dex_register_number, number_of_dex_registers, code_info));
+  }
+
+  // Get the internal kind of Dex register `dex_register_number`.
+  DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number,
+                                                    uint16_t number_of_dex_registers,
+                                                    const CodeInfo& code_info) const;
+
+  // Get the location of Dex register `dex_register_number`.
+  DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number,
+                                             uint16_t number_of_dex_registers,
+                                             const CodeInfo& code_info) const;
+
+  int32_t GetStackOffsetInBytes(uint16_t dex_register_number,
+                                uint16_t number_of_dex_registers,
+                                const CodeInfo& code_info) const {
+    DexRegisterLocation location =
+        GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
+    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
+    // GetDexRegisterLocation returns the offset in bytes.
+    return location.GetValue();
+  }
+
+  int32_t GetConstant(uint16_t dex_register_number,
+                      uint16_t number_of_dex_registers,
+                      const CodeInfo& code_info) const {
+    DexRegisterLocation location =
+        GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
+    DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
+    return location.GetValue();
+  }
+
+  int32_t GetMachineRegister(uint16_t dex_register_number,
+                             uint16_t number_of_dex_registers,
+                             const CodeInfo& code_info) const {
+    DexRegisterLocation location =
+        GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info);
+    DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister
+           || location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister)
+        << DexRegisterLocation::PrettyDescriptor(location.GetInternalKind());
+    return location.GetValue();
+  }
+
+  // Get the index of the entry in the Dex register location catalog
+  // corresponding to `dex_register_number`.
+  size_t GetLocationCatalogEntryIndex(uint16_t dex_register_number,
+                                      uint16_t number_of_dex_registers,
+                                      size_t number_of_location_catalog_entries) const {
+    if (!IsDexRegisterLive(dex_register_number)) {
+      return DexRegisterLocationCatalog::kNoLocationEntryIndex;
+    }
+
+    if (number_of_location_catalog_entries == 1) {
+      // We do not allocate space for location maps in the case of a
+      // single-entry location catalog, as it is useless.  The only valid
+      // entry index is 0.
+      return 0;
+    }
+
+    // The bit offset of the beginning of the map locations.
+    size_t map_locations_offset_in_bits =
+        GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
+    size_t index_in_dex_register_map = GetIndexInDexRegisterMap(dex_register_number);
+    DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
+    // The bit size of an entry.
+    size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
+    // The bit offset where `index_in_dex_register_map` is located.
+    size_t entry_offset_in_bits =
+        map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
+    size_t location_catalog_entry_index =
+        region_.LoadBits(entry_offset_in_bits, map_entry_size_in_bits);
+    DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
+    return location_catalog_entry_index;
+  }
+
+  // Map entry at `index_in_dex_register_map` to `location_catalog_entry_index`.
+  void SetLocationCatalogEntryIndex(size_t index_in_dex_register_map,
+                                    size_t location_catalog_entry_index,
+                                    uint16_t number_of_dex_registers,
+                                    size_t number_of_location_catalog_entries) {
+    DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
+    DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
+
+    if (number_of_location_catalog_entries == 1) {
+      // We do not allocate space for location maps in the case of a
+      // single-entry location catalog, as it is useless.
+      return;
+    }
+
+    // The bit offset of the beginning of the map locations.
+    size_t map_locations_offset_in_bits =
+        GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
+    // The bit size of an entry.
+    size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
+    // The bit offset where `index_in_dex_register_map` is located.
+    size_t entry_offset_in_bits =
+        map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
+    region_.StoreBits(entry_offset_in_bits, location_catalog_entry_index, map_entry_size_in_bits);
+  }
+
+  void SetLiveBitMask(uint16_t number_of_dex_registers,
+                      const BitVector& live_dex_registers_mask) {
+    size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
+    for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
+      region_.StoreBit(live_bit_mask_offset_in_bits + i, live_dex_registers_mask.IsBitSet(i));
+    }
+  }
+
+  bool IsDexRegisterLive(uint16_t dex_register_number) const {
+    size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
+    return region_.LoadBit(live_bit_mask_offset_in_bits + dex_register_number);
+  }
+
+  size_t GetNumberOfLiveDexRegisters(uint16_t number_of_dex_registers) const {
+    size_t number_of_live_dex_registers = 0;
+    for (size_t i = 0; i < number_of_dex_registers; ++i) {
+      if (IsDexRegisterLive(i)) {
+        ++number_of_live_dex_registers;
+      }
+    }
+    return number_of_live_dex_registers;
+  }
+
+  static size_t GetLiveBitMaskOffset() {
+    return kFixedSize;
+  }
+
+  // Compute the size of the live register bit mask (in bytes), for a
+  // method having `number_of_dex_registers` Dex registers.
+  static size_t GetLiveBitMaskSize(uint16_t number_of_dex_registers) {
+    return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
+  }
+
+  static size_t GetLocationMappingDataOffset(uint16_t number_of_dex_registers) {
+    return GetLiveBitMaskOffset() + GetLiveBitMaskSize(number_of_dex_registers);
+  }
+
+  size_t GetLocationMappingDataSize(uint16_t number_of_dex_registers,
+                                    size_t number_of_location_catalog_entries) const {
+    size_t location_mapping_data_size_in_bits =
+        GetNumberOfLiveDexRegisters(number_of_dex_registers)
+        * SingleEntrySizeInBits(number_of_location_catalog_entries);
+    return RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
+  }
+
+  // Return the size of a map entry in bits.  Note that if
+  // `number_of_location_catalog_entries` equals 1, this function returns 0,
+  // which is fine, as there is no need to allocate a map for a
+  // single-entry location catalog; the only valid location catalog entry index
+  // for a live register in this case is 0 and there is no need to
+  // store it.
+  static size_t SingleEntrySizeInBits(size_t number_of_location_catalog_entries) {
+    // Handle the case of 0, as we cannot pass 0 to art::WhichPowerOf2.
+    return number_of_location_catalog_entries == 0
+        ? 0u
+        : WhichPowerOf2(RoundUpToPowerOfTwo(number_of_location_catalog_entries));
+  }
+
+  // Return the size of the DexRegisterMap object, in bytes.
+  size_t Size() const {
+    return region_.size();
+  }
+
+ private:
+  // Return the index in the Dex register map corresponding to the Dex
+  // register number `dex_register_number`.
+  size_t GetIndexInDexRegisterMap(uint16_t dex_register_number) const {
+    if (!IsDexRegisterLive(dex_register_number)) {
+      return kInvalidIndexInDexRegisterMap;
+    }
+    return GetNumberOfLiveDexRegisters(dex_register_number);
+  }
+
+  // Special (invalid) Dex register map entry index meaning that there
+  // is no index in the map for a given Dex register (i.e., it must
+  // have been mapped to a DexRegisterLocation::Kind::kNone location).
+  static constexpr size_t kInvalidIndexInDexRegisterMap = -1;
+
+  static constexpr int kFixedSize = 0;
+
+  MemoryRegion region_;
+
+  friend class CodeInfo;
+  friend class StackMapStream;
+};
+
 /**
  * A Stack Map holds compilation information for a specific PC necessary for:
  * - Mapping it to a dex PC,
@@ -516,7 +673,8 @@
  * - Knowing the values of dex registers.
  *
  * The information is of the form:
- * [dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset, register_mask, stack_mask].
+ * [dex_pc, native_pc_offset, dex_register_map_offset, inlining_info_offset, register_mask,
+ * stack_mask].
  *
  * Note that register_mask is fixed size, but stack_mask is variable size, depending on the
  * stack size of a method.
@@ -568,49 +726,32 @@
   }
 
   static size_t ComputeStackMapSize(size_t stack_mask_size,
-                                    bool has_inline_info,
-                                    bool is_small_inline_info,
-                                    bool is_small_dex_map,
-                                    bool is_small_dex_pc,
-                                    bool is_small_native_pc);
-
-  static size_t ComputeStackMapSize(size_t stack_mask_size,
                                     size_t inline_info_size,
                                     size_t dex_register_map_size,
                                     size_t dex_pc_max,
-                                    size_t native_pc_max);
-
-  // TODO: Revisit this abstraction if we allow 3 bytes encoding.
-  typedef uint8_t kSmallEncoding;
-  typedef uint32_t kLargeEncoding;
-  static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
-  static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
-  static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
-  static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;
+                                    size_t native_pc_max,
+                                    size_t register_mask_max);
 
   // Special (invalid) offset for the DexRegisterMapOffset field meaning
   // that there is no Dex register map for this stack map.
   static constexpr uint32_t kNoDexRegisterMap = -1;
-  static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
-      std::numeric_limits<kSmallEncoding>::max();
 
   // Special (invalid) offset for the InlineDescriptorOffset field meaning
   // that there is no inline info for this stack map.
   static constexpr uint32_t kNoInlineInfo = -1;
-  static constexpr uint32_t kNoInlineInfoSmallEncoding =
-    std::numeric_limits<kSmallEncoding>::max();
-
-  // Returns the number of bytes needed for an entry in the StackMap.
-  static size_t NumberOfBytesForEntry(bool small_encoding) {
-    return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
-  }
 
  private:
+  static size_t ComputeStackMapSizeInternal(size_t stack_mask_size,
+                                            size_t number_of_bytes_for_inline_info,
+                                            size_t number_of_bytes_for_dex_map,
+                                            size_t number_of_bytes_for_dex_pc,
+                                            size_t number_of_bytes_for_native_pc,
+                                            size_t number_of_bytes_for_register_mask);
+
   // TODO: Instead of plain types such as "uint32_t", introduce
   // typedefs (and document the memory layout of StackMap).
   static constexpr int kRegisterMaskOffset = 0;
-  static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
-  static constexpr int kStackMaskOffset = kFixedSize;
+  static constexpr int kFixedSize = 0;
 
   MemoryRegion region_;
 
@@ -622,7 +763,8 @@
 /**
  * Wrapper around all compiler information collected for a method.
  * The information is of the form:
- * [overall_size, number_of_stack_maps, stack_mask_size, StackMap+, DexRegisterInfo+, InlineInfo*].
+ * [overall_size, number_of_location_catalog_entries, number_of_stack_maps, stack_mask_size,
+ * DexRegisterLocationCatalog+, StackMap+, DexRegisterMap+, InlineInfo*].
  */
 class CodeInfo {
  public:
@@ -633,50 +775,77 @@
     region_ = MemoryRegion(const_cast<void*>(data), size);
   }
 
+  static size_t EncodingSizeInBytes(size_t max_element) {
+    DCHECK(IsUint<32>(max_element));
+    return (max_element == 0) ? 0
+         : IsUint<8>(max_element) ? 1
+         : IsUint<16>(max_element) ? 2
+         : IsUint<24>(max_element) ? 3
+         : 4;
+  }
+
   void SetEncoding(size_t inline_info_size,
                    size_t dex_register_map_size,
                    size_t dex_pc_max,
-                   size_t native_pc_max) {
+                   size_t native_pc_max,
+                   size_t register_mask_max) {
     if (inline_info_size != 0) {
       region_.StoreBit(kHasInlineInfoBitOffset, 1);
-      region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
-          // + 1 to also encode kNoInlineInfo: if an inline info offset
-          // is at 0xFF, we want to overflow to a larger encoding, because it will
-          // conflict with kNoInlineInfo.
-          // The offset is relative to the dex register map. TODO: Change this.
-          inline_info_size + dex_register_map_size + 1));
+      // + 1 to also encode kNoInlineInfo: if an inline info offset
+      // is at 0xFF, we want to overflow to a larger encoding, because it will
+      // conflict with kNoInlineInfo.
+      // The offset is relative to the dex register map. TODO: Change this.
+      SetEncodingAt(kInlineInfoBitOffset,
+                    EncodingSizeInBytes(dex_register_map_size + inline_info_size + 1));
     } else {
       region_.StoreBit(kHasInlineInfoBitOffset, 0);
-      region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
+      SetEncodingAt(kInlineInfoBitOffset, 0);
     }
-    region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
-                     // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
-                     // is at 0xFF, we want to overflow to a larger encoding, because it will
-                     // conflict with kNoDexRegisterMap.
-                     IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
-    region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
-    region_.StoreBit(kHasSmallNativePcBitOffset,
-                     IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
+    // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
+    // is at 0xFF, we want to overflow to a larger encoding, because it will
+    // conflict with kNoDexRegisterMap.
+    SetEncodingAt(kDexRegisterMapBitOffset, EncodingSizeInBytes(dex_register_map_size + 1));
+    SetEncodingAt(kDexPcBitOffset, EncodingSizeInBytes(dex_pc_max));
+    SetEncodingAt(kNativePcBitOffset, EncodingSizeInBytes(native_pc_max));
+    SetEncodingAt(kRegisterMaskBitOffset, EncodingSizeInBytes(register_mask_max));
+  }
+
+  void SetEncodingAt(size_t bit_offset, size_t number_of_bytes) {
+    // We encode the number of bytes needed for writing a value on 3 bits,
+    // for values that we know are maximum 32bits.
+    region_.StoreBit(bit_offset, (number_of_bytes & 1));
+    region_.StoreBit(bit_offset + 1, (number_of_bytes & 2));
+    region_.StoreBit(bit_offset + 2, (number_of_bytes & 4));
+  }
+
+  size_t GetNumberOfBytesForEncoding(size_t bit_offset) const {
+    return region_.LoadBit(bit_offset)
+        + (region_.LoadBit(bit_offset + 1) << 1)
+        + (region_.LoadBit(bit_offset + 2) << 2);
   }
 
   bool HasInlineInfo() const {
     return region_.LoadBit(kHasInlineInfoBitOffset);
   }
 
-  bool HasSmallInlineInfo() const {
-    return region_.LoadBit(kHasSmallInlineInfoBitOffset);
+  size_t NumberOfBytesForInlineInfo() const {
+    return GetNumberOfBytesForEncoding(kInlineInfoBitOffset);
   }
 
-  bool HasSmallDexRegisterMap() const {
-    return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
+  size_t NumberOfBytesForDexRegisterMap() const {
+    return GetNumberOfBytesForEncoding(kDexRegisterMapBitOffset);
   }
 
-  bool HasSmallNativePc() const {
-    return region_.LoadBit(kHasSmallNativePcBitOffset);
+  size_t NumberOfBytesForRegisterMask() const {
+    return GetNumberOfBytesForEncoding(kRegisterMaskBitOffset);
   }
 
-  bool HasSmallDexPc() const {
-    return region_.LoadBit(kHasSmallDexPcBitOffset);
+  size_t NumberOfBytesForNativePc() const {
+    return GetNumberOfBytesForEncoding(kNativePcBitOffset);
+  }
+
+  size_t NumberOfBytesForDexPc() const {
+    return GetNumberOfBytesForEncoding(kDexPcBitOffset);
   }
 
   size_t ComputeStackMapRegisterMaskOffset() const {
@@ -684,7 +853,8 @@
   }
 
   size_t ComputeStackMapStackMaskOffset() const {
-    return StackMap::kStackMaskOffset;
+    return ComputeStackMapRegisterMaskOffset()
+        + (NumberOfBytesForRegisterMask() * sizeof(uint8_t));
   }
 
   size_t ComputeStackMapDexPcOffset() const {
@@ -693,18 +863,28 @@
 
   size_t ComputeStackMapNativePcOffset() const {
     return ComputeStackMapDexPcOffset()
-        + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
+        + (NumberOfBytesForDexPc() * sizeof(uint8_t));
   }
 
   size_t ComputeStackMapDexRegisterMapOffset() const {
     return ComputeStackMapNativePcOffset()
-        + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
+        + (NumberOfBytesForNativePc() * sizeof(uint8_t));
   }
 
   size_t ComputeStackMapInlineInfoOffset() const {
     CHECK(HasInlineInfo());
     return ComputeStackMapDexRegisterMapOffset()
-        + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
+        + (NumberOfBytesForDexRegisterMap() * sizeof(uint8_t));
+  }
+
+  uint32_t GetDexRegisterLocationCatalogOffset() const {
+    return kFixedSize;
+  }
+
+  DexRegisterLocationCatalog GetDexRegisterLocationCatalog() const {
+    return DexRegisterLocationCatalog(region_.Subregion(
+        GetDexRegisterLocationCatalogOffset(),
+        GetDexRegisterLocationCatalogSize()));
   }
 
   StackMap GetStackMapAt(size_t i) const {
@@ -720,6 +900,19 @@
     region_.StoreUnaligned<uint32_t>(kOverallSizeOffset, size);
   }
 
+  uint32_t GetNumberOfDexRegisterLocationCatalogEntries() const {
+    return region_.LoadUnaligned<uint32_t>(kNumberOfDexRegisterLocationCatalogEntriesOffset);
+  }
+
+  void SetNumberOfDexRegisterLocationCatalogEntries(uint32_t num_entries) {
+    region_.StoreUnaligned<uint32_t>(kNumberOfDexRegisterLocationCatalogEntriesOffset, num_entries);
+  }
+
+  uint32_t GetDexRegisterLocationCatalogSize() const {
+    return ComputeDexRegisterLocationCatalogSize(GetDexRegisterLocationCatalogOffset(),
+                                                 GetNumberOfDexRegisterLocationCatalogEntries());
+  }
+
   uint32_t GetStackMaskSize() const {
     return region_.LoadUnaligned<uint32_t>(kStackMaskSizeOffset);
   }
@@ -739,31 +932,31 @@
   // Get the size of one stack map of this CodeInfo object, in bytes.
   // All stack maps of a CodeInfo have the same size.
   size_t StackMapSize() const {
-    return StackMap::ComputeStackMapSize(GetStackMaskSize(),
-                                         HasInlineInfo(),
-                                         HasSmallInlineInfo(),
-                                         HasSmallDexRegisterMap(),
-                                         HasSmallDexPc(),
-                                         HasSmallNativePc());
+    return StackMap::ComputeStackMapSizeInternal(GetStackMaskSize(),
+                                                 NumberOfBytesForInlineInfo(),
+                                                 NumberOfBytesForDexRegisterMap(),
+                                                 NumberOfBytesForDexPc(),
+                                                 NumberOfBytesForNativePc(),
+                                                 NumberOfBytesForRegisterMask());
   }
 
   // Get the size all the stack maps of this CodeInfo object, in bytes.
-  size_t StackMapsSize() const {
+  size_t GetStackMapsSize() const {
     return StackMapSize() * GetNumberOfStackMaps();
   }
 
   size_t GetDexRegisterMapsOffset() const {
-    return CodeInfo::kFixedSize + StackMapsSize();
+    return GetStackMapsOffset() + GetStackMapsSize();
   }
 
   uint32_t GetStackMapsOffset() const {
-    return kFixedSize;
+    return GetDexRegisterLocationCatalogOffset() + GetDexRegisterLocationCatalogSize();
   }
 
   DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
     DCHECK(stack_map.HasDexRegisterMap(*this));
-    uint32_t offset = stack_map.GetDexRegisterMapOffset(*this) + GetDexRegisterMapsOffset();
-    size_t size = ComputeDexRegisterMapSize(offset, number_of_dex_registers);
+    uint32_t offset = GetDexRegisterMapsOffset() + stack_map.GetDexRegisterMapOffset(*this);
+    size_t size = ComputeDexRegisterMapSizeOf(offset, number_of_dex_registers);
     return DexRegisterMap(region_.Subregion(offset, size));
   }
 
@@ -806,50 +999,73 @@
   // typedefs (and document the memory layout of CodeInfo).
   static constexpr int kOverallSizeOffset = 0;
   static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
-  static constexpr int kNumberOfStackMapsOffset = kEncodingInfoOffset + sizeof(uint8_t);
+  static constexpr int kNumberOfDexRegisterLocationCatalogEntriesOffset =
+      kEncodingInfoOffset + sizeof(uint16_t);
+  static constexpr int kNumberOfStackMapsOffset =
+      kNumberOfDexRegisterLocationCatalogEntriesOffset + sizeof(uint32_t);
   static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
   static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);
 
   static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
-  static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
-  static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
-  static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
-  static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;
+  static constexpr int kInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
+  static constexpr int kDexRegisterMapBitOffset = kInlineInfoBitOffset + 3;
+  static constexpr int kDexPcBitOffset = kDexRegisterMapBitOffset + 3;
+  static constexpr int kNativePcBitOffset = kDexPcBitOffset + 3;
+  static constexpr int kRegisterMaskBitOffset = kNativePcBitOffset + 3;
 
   MemoryRegion GetStackMaps() const {
     return region_.size() == 0
         ? MemoryRegion()
-        : region_.Subregion(kFixedSize, StackMapsSize());
+        : region_.Subregion(GetStackMapsOffset(), GetStackMapsSize());
   }
 
-  // Compute the size of a Dex register map starting at offset `origin` in
-  // `region_` and containing `number_of_dex_registers` locations.
-  size_t ComputeDexRegisterMapSize(uint32_t origin, uint32_t number_of_dex_registers) const {
-    // TODO: Ideally, we would like to use art::DexRegisterMap::Size or
-    // art::DexRegisterMap::FindLocationOffset, but the DexRegisterMap is not
-    // yet built.  Try to factor common code.
-    size_t offset =
-        origin + DexRegisterMap::GetDexRegisterMapLocationsOffset(number_of_dex_registers);
+  // Compute the size of the Dex register map associated to the stack map at
+  // `dex_register_map_offset_in_code_info`.
+  size_t ComputeDexRegisterMapSizeOf(uint32_t dex_register_map_offset_in_code_info,
+                                     uint16_t number_of_dex_registers) const {
+    // Offset where the actual mapping data starts within art::DexRegisterMap.
+    size_t location_mapping_data_offset_in_dex_register_map =
+        DexRegisterMap::GetLocationMappingDataOffset(number_of_dex_registers);
+    // Create a temporary art::DexRegisterMap covering only the live-register
+    // bit mask, so that we can call art::DexRegisterMap::GetNumberOfLiveDexRegisters on it.
+    DexRegisterMap dex_register_map_without_locations(
+        MemoryRegion(region_.Subregion(dex_register_map_offset_in_code_info,
+                                       location_mapping_data_offset_in_dex_register_map)));
+    size_t number_of_live_dex_registers =
+        dex_register_map_without_locations.GetNumberOfLiveDexRegisters(number_of_dex_registers);
+    size_t location_mapping_data_size_in_bits =
+        DexRegisterMap::SingleEntrySizeInBits(GetNumberOfDexRegisterLocationCatalogEntries())
+        * number_of_live_dex_registers;
+    size_t location_mapping_data_size_in_bytes =
+        RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
+    size_t dex_register_map_size =
+        location_mapping_data_offset_in_dex_register_map + location_mapping_data_size_in_bytes;
+    return dex_register_map_size;
+  }
 
-    // Create a temporary DexRegisterMap to be able to call DexRegisterMap.IsDexRegisterLive.
-    DexRegisterMap only_live_mask(MemoryRegion(region_.Subregion(origin, offset - origin)));
+  // Compute the size of a Dex register location catalog starting at offset `origin`
+  // in `region_` and containing `number_of_dex_locations` entries.
+  size_t ComputeDexRegisterLocationCatalogSize(uint32_t origin,
+                                               uint32_t number_of_dex_locations) const {
+    // TODO: Ideally, we would like to use art::DexRegisterLocationCatalog::Size or
+    // art::DexRegisterLocationCatalog::FindLocationOffset, but the
+    // DexRegisterLocationCatalog is not yet built.  Try to factor common code.
+    size_t offset = origin + DexRegisterLocationCatalog::kFixedSize;
 
-    // Skip the first `number_of_dex_registers - 1` entries.
-    for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
-      if (only_live_mask.IsDexRegisterLive(i)) {
-        // Read the first next byte and inspect its first 3 bits to decide
-        // whether it is a short or a large location.
-        DexRegisterMap::ShortLocation first_byte =
-            region_.LoadUnaligned<DexRegisterMap::ShortLocation>(offset);
-        DexRegisterLocation::Kind kind =
-            DexRegisterMap::ExtractKindFromShortLocation(first_byte);
-        if (DexRegisterLocation::IsShortLocationKind(kind)) {
-          // Short location.  Skip the current byte.
-          offset += DexRegisterMap::SingleShortEntrySize();
-        } else {
-          // Large location.  Skip the 5 next bytes.
-          offset += DexRegisterMap::SingleLargeEntrySize();
-        }
+    // Walk over all `number_of_dex_locations` entries to accumulate the total size.
+    for (uint16_t i = 0; i < number_of_dex_locations; ++i) {
+      // Read the next byte and inspect its first 3 bits to decide
+      // whether it is a short or a large location.
+      DexRegisterLocationCatalog::ShortLocation first_byte =
+          region_.LoadUnaligned<DexRegisterLocationCatalog::ShortLocation>(offset);
+      DexRegisterLocation::Kind kind =
+          DexRegisterLocationCatalog::ExtractKindFromShortLocation(first_byte);
+      if (DexRegisterLocation::IsShortLocationKind(kind)) {
+        // Short location.  Skip the current byte.
+        offset += DexRegisterLocationCatalog::SingleShortEntrySize();
+      } else {
+        // Large location.  Skip the 5 next bytes.
+        offset += DexRegisterLocationCatalog::SingleLargeEntrySize();
       }
     }
     size_t size = offset - origin;
diff --git a/test/468-bool-simplifier-regression/expected.txt b/test/468-bool-simplifier-regression/expected.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/468-bool-simplifier-regression/expected.txt
diff --git a/test/468-bool-simplifier-regression/info.txt b/test/468-bool-simplifier-regression/info.txt
new file mode 100644
index 0000000..0a46584
--- /dev/null
+++ b/test/468-bool-simplifier-regression/info.txt
@@ -0,0 +1,2 @@
+Regression test for optimizing's boolean simplifier
+that used to trip when a boolean value was the input of an If.
diff --git a/test/468-bool-simplifier-regression/smali/TestCase.smali b/test/468-bool-simplifier-regression/smali/TestCase.smali
new file mode 100644
index 0000000..f36304d
--- /dev/null
+++ b/test/468-bool-simplifier-regression/smali/TestCase.smali
@@ -0,0 +1,32 @@
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.class public LTestCase;
+
+.super Ljava/lang/Object;
+
+.field public static value:Z
+
+.method public static testCase()Z
+    .registers 2
+    sget-boolean v0, LTestCase;->value:Z
+    const/4 v1, 1
+    if-eq v0, v1, :label1
+    const/4 v1, 1
+    goto :label2
+    :label1
+    const/4 v1, 0
+    :label2
+    return v1
+.end method
diff --git a/test/468-bool-simplifier-regression/src/Main.java b/test/468-bool-simplifier-regression/src/Main.java
new file mode 100644
index 0000000..1dd27c9
--- /dev/null
+++ b/test/468-bool-simplifier-regression/src/Main.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.lang.reflect.*;
+
+public class Main {
+  public static boolean runTest(boolean input) throws Exception {
+    Class<?> c = Class.forName("TestCase");
+    Method m = c.getMethod("testCase");
+    Field f = c.getField("value");
+    f.set(null, (Boolean) input);
+    return (Boolean) m.invoke(null);
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (runTest(true) != false) {
+      throw new Error("Expected false, got true");
+    }
+
+    if (runTest(false) != true) {
+      throw new Error("Expected true, got false");
+    }
+  }
+}
diff --git a/tools/run-jdwp-tests.sh b/tools/run-jdwp-tests.sh
index 2c555d5..90c01f5 100755
--- a/tools/run-jdwp-tests.sh
+++ b/tools/run-jdwp-tests.sh
@@ -19,7 +19,7 @@
   exit 1
 fi
 
-if [ $ANDROID_SERIAL == 03a79ae90ae5889b ] || [ $ANDROID_SERIAL == HT4CTJT03670 ] || [ $ANDROID_SERIAL == HT49CJT00070 ]; then
+if [[ $ANDROID_SERIAL == 03a79ae90ae5889b ]] || [[ $ANDROID_SERIAL == HT4CTJT03670 ]] || [[ $ANDROID_SERIAL == HT49CJT00070 ]]; then
   echo "Not run because of localhost failures. Investigating."
   exit 0
 fi
@@ -40,13 +40,25 @@
 args=$@
 debuggee_args="-Xcompiler-option --compiler-backend=Optimizing -Xcompiler-option --debuggable"
 device_dir="--device-dir=/data/local/tmp"
+# We use the art script on target to ensure the runner and the debuggee share the same
+# image.
+vm_command="--vm-command=$art"
+image_compiler_option=""
 
 while true; do
   if [[ "$1" == "--mode=host" ]]; then
-    art="art"
+    # Specify bash explicitly since the art script cannot, since it has to run on the device
+    # with mksh.
+    art="bash out/host/linux-x86/bin/art"
     # We force generation of a new image to avoid build-time and run-time classpath differences.
     image="-Ximage:/system/non/existent"
+    # We do not need a device directory on host.
     device_dir=""
+    # Vogar knows which VM to use on host.
+    vm_command=""
+    # We only compile the image on the host. Note that not providing this option
+    # puts us below the adb command limit for vogar.
+    image_compiler_option="--vm-arg -Ximage-compiler-option --vm-arg --debuggable"
     shift
   elif [[ $1 == -Ximage:* ]]; then
     image="$1"
@@ -59,16 +71,16 @@
 done
 
 # Run the tests using vogar.
-vogar --vm-command=$art \
+vogar $vm_command \
       --vm-arg $image \
       --verbose \
       $args \
       $device_dir \
-      --vm-arg -Ximage-compiler-option \
-      --vm-arg --debuggable \
+      $image_compiler_option \
       --timeout 600 \
       --vm-arg -Djpda.settings.verbose=true \
       --vm-arg -Djpda.settings.syncPort=34016 \
+      --vm-arg -Djpda.settings.transportAddress=127.0.0.1:55107 \
       --vm-arg -Djpda.settings.debuggeeJavaPath="$art $image $debuggee_args" \
       --classpath $test_jar \
       --classpath $junit_jar \