Merge "Fix for potential moving GC bugs around proxy class."
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 195949b..bf32feb 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -1006,7 +1006,7 @@
   // Fixup int pointers for the field array.
   CHECK(!arr->IsObjectArray());
   const size_t num_elements = arr->GetLength();
-  if (target_ptr_size_ == 4) {
+  if (target_ptr_size_ == 4u) {
     // Will get fixed up by fixup object.
     dst->SetClass(down_cast<mirror::Class*>(
     GetImageAddress(mirror::IntArray::GetArrayClass())));
@@ -1026,10 +1026,11 @@
       CHECK(it2 != art_field_reloc_.end()) << "No relocation for field " << PrettyField(field);
       fixup_location = image_begin_ + it2->second;
     }
-    if (target_ptr_size_ == 4) {
+    if (target_ptr_size_ == 4u) {
       down_cast<mirror::IntArray*>(dest_array)->SetWithoutChecks<kVerifyNone>(
           i, static_cast<uint32_t>(reinterpret_cast<uint64_t>(fixup_location)));
     } else {
+      DCHECK_EQ(target_ptr_size_, 8u);
       down_cast<mirror::LongArray*>(dest_array)->SetWithoutChecks<kVerifyNone>(
           i, reinterpret_cast<uint64_t>(fixup_location));
     }
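
The image_writer.cc hunk above writes native fixup pointers into a mirror::IntArray or mirror::LongArray depending on the target image's pointer size, and the new DCHECK documents that only 4- and 8-byte targets are expected. A standalone sketch of the same idea, using plain std::vector in place of the mirror array types (stub code, not ART APIs):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Stores a native fixup pointer into a 32-bit or 64-bit element array,
    // depending on the pointer size of the target image.
    void StoreFixupPointer(void* fixup_location, std::size_t target_ptr_size, std::size_t index,
                           std::vector<uint32_t>* narrow, std::vector<uint64_t>* wide) {
      if (target_ptr_size == 4u) {
        // 32-bit target: the value must fit in a uint32_t slot.
        (*narrow)[index] = static_cast<uint32_t>(reinterpret_cast<uint64_t>(fixup_location));
      } else {
        assert(target_ptr_size == 8u);  // mirrors the DCHECK added above
        (*wide)[index] = reinterpret_cast<uint64_t>(fixup_location);
      }
    }
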
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index c4eaabf..49a0444 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -711,8 +711,8 @@
           clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
         } else {
           clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
-          HLoadClass* load_class =
-              new (arena_) HLoadClass(storage_index, is_referrer_class, dex_pc);
+          HLoadClass* load_class = new (arena_) HLoadClass(
+              storage_index, *dex_compilation_unit_->GetDexFile(), is_referrer_class, dex_pc);
           current_block_->AddInstruction(load_class);
           clinit_check = new (arena_) HClinitCheck(load_class, dex_pc);
           current_block_->AddInstruction(clinit_check);
@@ -915,7 +915,8 @@
       *outer_compilation_unit_->GetDexFile(), storage_index);
   bool is_initialized = resolved_field->GetDeclaringClass()->IsInitialized() && is_in_dex_cache;
 
-  HLoadClass* constant = new (arena_) HLoadClass(storage_index, is_referrer_class, dex_pc);
+  HLoadClass* constant = new (arena_) HLoadClass(
+      storage_index, *dex_compilation_unit_->GetDexFile(), is_referrer_class, dex_pc);
   current_block_->AddInstruction(constant);
 
   HInstruction* cls = constant;
@@ -1151,7 +1152,10 @@
   }
   HInstruction* object = LoadLocal(reference, Primitive::kPrimNot);
   HLoadClass* cls = new (arena_) HLoadClass(
-      type_index, IsOutermostCompilingClass(type_index), dex_pc);
+      type_index,
+      *dex_compilation_unit_->GetDexFile(),
+      IsOutermostCompilingClass(type_index),
+      dex_pc);
   current_block_->AddInstruction(cls);
   // The class needs a temporary before being used by the type check.
   Temporaries temps(graph_);
@@ -1976,7 +1980,8 @@
             ? kQuickAllocObjectWithAccessCheck
             : kQuickAllocObject;
 
-        current_block_->AddInstruction(new (arena_) HNewInstance(dex_pc, type_index, entrypoint));
+        current_block_->AddInstruction(new (arena_) HNewInstance(
+            dex_pc, type_index, *dex_compilation_unit_->GetDexFile(), entrypoint));
         UpdateLocal(instruction.VRegA(), current_block_->GetLastInstruction());
       }
       break;
@@ -2161,8 +2166,11 @@
         MaybeRecordStat(MethodCompilationStat::kNotCompiledCantAccesType);
         return false;
       }
-      current_block_->AddInstruction(
-          new (arena_) HLoadClass(type_index, IsOutermostCompilingClass(type_index), dex_pc));
+      current_block_->AddInstruction(new (arena_) HLoadClass(
+          type_index,
+          *dex_compilation_unit_->GetDexFile(),
+          IsOutermostCompilingClass(type_index),
+          dex_pc));
       UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
       break;
     }
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 47c6318..a72817f 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -141,7 +141,6 @@
   }
 
   if (!TryBuildAndInline(resolved_method, invoke_instruction, method_index, can_use_dex_cache)) {
-    resolved_method->SetShouldNotInline();
     return false;
   }
 
@@ -208,6 +207,7 @@
   if (!builder.BuildGraph(*code_item)) {
     VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                    << " could not be built, so cannot be inlined";
+    resolved_method->SetShouldNotInline();
     return false;
   }
 
@@ -215,12 +215,14 @@
                                                   compiler_driver_->GetInstructionSet())) {
     VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                    << " cannot be inlined because of the register allocator";
+    resolved_method->SetShouldNotInline();
     return false;
   }
 
   if (!callee_graph->TryBuildingSsa()) {
     VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                    << " could not be transformed to SSA";
+    resolved_method->SetShouldNotInline();
     return false;
   }
 
@@ -257,6 +259,7 @@
     if (block->IsLoopHeader()) {
       VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                      << " could not be inlined because it contains a loop";
+      resolved_method->SetShouldNotInline();
       return false;
     }
 
@@ -272,6 +275,7 @@
         VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                        << " could not be inlined because " << current->DebugName()
                        << " can throw";
+        resolved_method->SetShouldNotInline();
         return false;
       }
 
@@ -279,6 +283,7 @@
         VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                        << " could not be inlined because " << current->DebugName()
                        << " needs an environment";
+        resolved_method->SetShouldNotInline();
         return false;
       }
 
@@ -286,6 +291,8 @@
         VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
                        << " could not be inlined because " << current->DebugName()
                        << " it is in a different dex file and requires access to the dex cache";
+        // Do not flag the method as not-inlineable. A caller within the same
+        // dex file could still successfully inline it.
         return false;
       }
     }
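
The relocated SetShouldNotInline() calls above distinguish failures that hold for every caller (the callee's graph cannot be built, contains a loop, or has instructions that can throw or need an environment) from the dex-cache failure, which is caller-dependent: a caller in the same dex file may still inline the method, so that verdict is not cached. A standalone sketch of the distinction (stub types, not ART APIs):

    // Cache only verdicts that are true for every possible caller; caller-
    // dependent failures must be re-evaluated at each call site.
    struct Callee {
      bool has_loop = false;
      bool needs_own_dex_cache = false;
      bool should_not_inline = false;  // stands in for ArtMethod::SetShouldNotInline()
    };

    bool TryInlineSketch(Callee* callee, bool caller_in_same_dex_file) {
      if (callee->has_loop) {
        callee->should_not_inline = true;  // permanent: no caller can ever inline this
        return false;
      }
      if (callee->needs_own_dex_cache && !caller_in_same_dex_file) {
        return false;  // caller-dependent: a same-dex-file caller may still succeed
      }
      return true;
    }
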
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 43fe374..4e3436e 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -326,9 +326,6 @@
 
 // TODO: Refactor DexFileMethodInliner and have something nicer than InlineMethod.
 void IntrinsicsRecognizer::Run() {
-  DexFileMethodInliner* inliner = driver_->GetMethodInlinerMap()->GetMethodInliner(dex_file_);
-  DCHECK(inliner != nullptr);
-
   for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
     HBasicBlock* block = it.Current();
     for (HInstructionIterator inst_it(block->GetInstructions()); !inst_it.Done();
@@ -337,6 +334,9 @@
       if (inst->IsInvoke()) {
         HInvoke* invoke = inst->AsInvoke();
         InlineMethod method;
+        DexFileMethodInliner* inliner =
+            driver_->GetMethodInlinerMap()->GetMethodInliner(&invoke->GetDexFile());
+        DCHECK(inliner != nullptr);
         if (inliner->IsIntrinsic(invoke->GetDexMethodIndex(), &method)) {
           Intrinsics intrinsic = GetIntrinsic(method);
 
@@ -344,7 +344,7 @@
             if (!CheckInvokeType(intrinsic, invoke)) {
               LOG(WARNING) << "Found an intrinsic with unexpected invoke type: "
                            << intrinsic << " for "
-                           << PrettyMethod(invoke->GetDexMethodIndex(), *dex_file_);
+                           << PrettyMethod(invoke->GetDexMethodIndex(), invoke->GetDexFile());
             } else {
               invoke->SetIntrinsic(intrinsic);
             }
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index c243ef3..741fb64 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -30,16 +30,15 @@
 // Recognize intrinsics from HInvoke nodes.
 class IntrinsicsRecognizer : public HOptimization {
  public:
-  IntrinsicsRecognizer(HGraph* graph, const DexFile* dex_file, CompilerDriver* driver)
+  IntrinsicsRecognizer(HGraph* graph, CompilerDriver* driver)
       : HOptimization(graph, true, kIntrinsicsRecognizerPassName),
-        dex_file_(dex_file), driver_(driver) {}
+        driver_(driver) {}
 
   void Run() OVERRIDE;
 
   static constexpr const char* kIntrinsicsRecognizerPassName = "intrinsics_recognition";
 
  private:
-  const DexFile* dex_file_;
   CompilerDriver* driver_;
 
   DISALLOW_COPY_AND_ASSIGN(IntrinsicsRecognizer);
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 47da9cc..2ece5a5 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -1290,11 +1290,16 @@
     block->RemovePhi(it.Current()->AsPhi());
   }
 
+  if (block->IsExitBlock()) {
+    exit_block_ = nullptr;
+  }
+
   reverse_post_order_.Delete(block);
   blocks_.Put(block->GetBlockId(), nullptr);
 }
 
 void HGraph::InlineInto(HGraph* outer_graph, HInvoke* invoke) {
+  DCHECK(HasExitBlock()) << "Unimplemented scenario";
   if (GetBlocks().Size() == 3) {
     // Simple case of an entry block, a body block, and an exit block.
     // Put the body block's instruction into `invoke`'s block.
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 12ace41..01870c3 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -155,6 +155,7 @@
 
   HBasicBlock* GetEntryBlock() const { return entry_block_; }
   HBasicBlock* GetExitBlock() const { return exit_block_; }
+  bool HasExitBlock() const { return exit_block_ != nullptr; }
 
   void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
   void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
@@ -2387,6 +2388,7 @@
   uint32_t GetDexPc() const OVERRIDE { return dex_pc_; }
 
   uint32_t GetDexMethodIndex() const { return dex_method_index_; }
+  const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }
 
   InvokeType GetOriginalInvokeType() const { return original_invoke_type_; }
 
@@ -2598,14 +2600,19 @@
 
 class HNewInstance : public HExpression<0> {
  public:
-  HNewInstance(uint32_t dex_pc, uint16_t type_index, QuickEntrypointEnum entrypoint)
+  HNewInstance(uint32_t dex_pc,
+               uint16_t type_index,
+               const DexFile& dex_file,
+               QuickEntrypointEnum entrypoint)
       : HExpression(Primitive::kPrimNot, SideEffects::None()),
         dex_pc_(dex_pc),
         type_index_(type_index),
+        dex_file_(dex_file),
         entrypoint_(entrypoint) {}
 
   uint32_t GetDexPc() const OVERRIDE { return dex_pc_; }
   uint16_t GetTypeIndex() const { return type_index_; }
+  const DexFile& GetDexFile() const { return dex_file_; }
 
   // Calls runtime so needs an environment.
   bool NeedsEnvironment() const OVERRIDE { return true; }
@@ -2624,6 +2631,7 @@
  private:
   const uint32_t dex_pc_;
   const uint16_t type_index_;
+  const DexFile& dex_file_;
   const QuickEntrypointEnum entrypoint_;
 
   DISALLOW_COPY_AND_ASSIGN(HNewInstance);
@@ -3428,10 +3436,12 @@
 class HLoadClass : public HExpression<0> {
  public:
   HLoadClass(uint16_t type_index,
+             const DexFile& dex_file,
              bool is_referrers_class,
              uint32_t dex_pc)
       : HExpression(Primitive::kPrimNot, SideEffects::None()),
         type_index_(type_index),
+        dex_file_(dex_file),
         is_referrers_class_(is_referrers_class),
         dex_pc_(dex_pc),
         generate_clinit_check_(false),
@@ -3487,12 +3497,15 @@
     return loaded_class_rti_.IsExact();
   }
 
+  const DexFile& GetDexFile() const { return dex_file_; }
+
   bool NeedsDexCache() const OVERRIDE { return !is_referrers_class_; }
 
   DECLARE_INSTRUCTION(LoadClass);
 
  private:
   const uint16_t type_index_;
+  const DexFile& dex_file_;
   const bool is_referrers_class_;
   const uint32_t dex_pc_;
   // Whether this instruction must generate the initialization check.
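
HLoadClass and HNewInstance now record the dex file their type index refers to, because after inlining a caller's graph can contain instructions whose indices belong to the callee's dex file, so a single graph-wide dex file can no longer be assumed. A standalone sketch of the pattern (stub types, not ART code):

    #include <cstdint>
    #include <string>

    struct DexFileStub { std::string location; };  // stand-in for art::DexFile

    // A type-referencing instruction keeps both the index and a reference to
    // the dex file that the index is relative to.
    class LoadClassSketch {
     public:
      LoadClassSketch(uint16_t type_index, const DexFileStub& dex_file)
          : type_index_(type_index), dex_file_(dex_file) {}
      uint16_t GetTypeIndex() const { return type_index_; }
      const DexFileStub& GetDexFile() const { return dex_file_; }
     private:
      const uint16_t type_index_;
      const DexFileStub& dex_file_;  // not owned; must outlive the instruction
    };
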
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index b2e8ecd..fa3c310 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -223,7 +223,6 @@
   CompiledMethod* CompileOptimized(HGraph* graph,
                                    CodeGenerator* codegen,
                                    CompilerDriver* driver,
-                                   const DexFile& dex_file,
                                    const DexCompilationUnit& dex_compilation_unit,
                                    PassInfoPrinter* pass_info) const;
 
@@ -316,7 +315,6 @@
 static void RunOptimizations(HGraph* graph,
                              CompilerDriver* driver,
                              OptimizingCompilerStats* stats,
-                             const DexFile& dex_file,
                              const DexCompilationUnit& dex_compilation_unit,
                              PassInfoPrinter* pass_info_printer,
                              StackHandleScopeCollection* handles) {
@@ -335,10 +333,10 @@
   GVNOptimization gvn(graph, side_effects);
   LICM licm(graph, side_effects);
   BoundsCheckElimination bce(graph);
-  ReferenceTypePropagation type_propagation(graph, dex_file, dex_compilation_unit, handles);
+  ReferenceTypePropagation type_propagation(graph, handles);
   InstructionSimplifier simplify2(graph, stats, "instruction_simplifier_after_types");
 
-  IntrinsicsRecognizer intrinsics(graph, dex_compilation_unit.GetDexFile(), driver);
+  IntrinsicsRecognizer intrinsics(graph, driver);
 
   HOptimization* optimizations[] = {
     &intrinsics,
@@ -391,12 +389,11 @@
 CompiledMethod* OptimizingCompiler::CompileOptimized(HGraph* graph,
                                                      CodeGenerator* codegen,
                                                      CompilerDriver* compiler_driver,
-                                                     const DexFile& dex_file,
                                                      const DexCompilationUnit& dex_compilation_unit,
                                                      PassInfoPrinter* pass_info_printer) const {
   StackHandleScopeCollection handles(Thread::Current());
   RunOptimizations(graph, compiler_driver, compilation_stats_.get(),
-                   dex_file, dex_compilation_unit, pass_info_printer, &handles);
+                   dex_compilation_unit, pass_info_printer, &handles);
 
   AllocateRegisters(graph, codegen, pass_info_printer);
 
@@ -585,7 +582,6 @@
     return CompileOptimized(graph,
                             codegen.get(),
                             compiler_driver,
-                            dex_file,
                             dex_compilation_unit,
                             &pass_info_printer);
   } else if (shouldOptimize && can_allocate_registers) {
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index 12b1c2b..601b48a 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -161,7 +161,8 @@
 
 void ReferenceTypePropagation::VisitNewInstance(HNewInstance* instr) {
   ScopedObjectAccess soa(Thread::Current());
-  mirror::DexCache* dex_cache = dex_compilation_unit_.GetClassLinker()->FindDexCache(dex_file_);
+  mirror::DexCache* dex_cache =
+      Runtime::Current()->GetClassLinker()->FindDexCache(instr->GetDexFile());
   // Get type from dex cache assuming it was populated by the verifier.
   mirror::Class* resolved_class = dex_cache->GetResolvedType(instr->GetTypeIndex());
   if (resolved_class != nullptr) {
@@ -172,7 +173,8 @@
 
 void ReferenceTypePropagation::VisitLoadClass(HLoadClass* instr) {
   ScopedObjectAccess soa(Thread::Current());
-  mirror::DexCache* dex_cache = dex_compilation_unit_.GetClassLinker()->FindDexCache(dex_file_);
+  mirror::DexCache* dex_cache =
+      Runtime::Current()->GetClassLinker()->FindDexCache(instr->GetDexFile());
   // Get type from dex cache assuming it was populated by the verifier.
   mirror::Class* resolved_class = dex_cache->GetResolvedType(instr->GetTypeIndex());
   if (resolved_class != nullptr) {
diff --git a/compiler/optimizing/reference_type_propagation.h b/compiler/optimizing/reference_type_propagation.h
index 733e18e..b68fc67 100644
--- a/compiler/optimizing/reference_type_propagation.h
+++ b/compiler/optimizing/reference_type_propagation.h
@@ -30,13 +30,8 @@
  */
 class ReferenceTypePropagation : public HOptimization {
  public:
-  ReferenceTypePropagation(HGraph* graph,
-                           const DexFile& dex_file,
-                           const DexCompilationUnit& dex_compilation_unit,
-                           StackHandleScopeCollection* handles)
+  ReferenceTypePropagation(HGraph* graph, StackHandleScopeCollection* handles)
     : HOptimization(graph, true, kReferenceTypePropagationPassName),
-      dex_file_(dex_file),
-      dex_compilation_unit_(dex_compilation_unit),
       handles_(handles),
       worklist_(graph->GetArena(), kDefaultWorklistSize) {}
 
@@ -66,8 +61,6 @@
   ReferenceTypeInfo MergeTypes(const ReferenceTypeInfo& a, const ReferenceTypeInfo& b)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  const DexFile& dex_file_;
-  const DexCompilationUnit& dex_compilation_unit_;
   StackHandleScopeCollection* handles_;
 
   GrowableArray<HInstruction*> worklist_;
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index b446815..5663e39 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -144,14 +144,17 @@
 
   // Note: use RoundUp to word-size here if you want CodeInfo objects to be word aligned.
   needed_size_ = CodeInfo::kFixedSize
-      + dex_register_location_catalog_size_
       + stack_maps_size_
+      + dex_register_location_catalog_size_
       + dex_register_maps_size_
       + inline_info_size_;
 
-  dex_register_location_catalog_start_ = CodeInfo::kFixedSize;
-  stack_maps_start_ = dex_register_location_catalog_start_ + dex_register_location_catalog_size_;
-  dex_register_maps_start_ = stack_maps_start_ + stack_maps_size_;
+  stack_maps_start_ = CodeInfo::kFixedSize;
+  // TODO: Move the catalog to the end. It is currently too expensive at runtime
+  // to compute its size (note that we do not encode that size in the CodeInfo).
+  dex_register_location_catalog_start_ = stack_maps_start_ + stack_maps_size_;
+  dex_register_maps_start_ =
+      dex_register_location_catalog_start_ + dex_register_location_catalog_size_;
   inline_infos_start_ = dex_register_maps_start_ + dex_register_maps_size_;
 
   return needed_size_;
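
With this reordering the section offsets are running sums from the fixed header, with the stack maps placed first and the location catalog second. A worked sketch of the arithmetic with made-up sizes (the constants below are placeholders, not real encoded values):

    #include <cstddef>
    #include <iostream>

    // Worked example of the new layout with made-up section sizes.
    int main() {
      const std::size_t kFixedSize = 16;  // placeholder header size
      const std::size_t stack_maps_size = 96;
      const std::size_t catalog_size = 24;
      const std::size_t dex_register_maps_size = 40;
      const std::size_t inline_info_size = 8;

      const std::size_t stack_maps_start = kFixedSize;                                          // 16
      const std::size_t catalog_start = stack_maps_start + stack_maps_size;                     // 112
      const std::size_t dex_register_maps_start = catalog_start + catalog_size;                 // 136
      const std::size_t inline_infos_start = dex_register_maps_start + dex_register_maps_size;  // 176
      const std::size_t needed_size = inline_infos_start + inline_info_size;                    // 184

      std::cout << "stack maps @" << stack_maps_start
                << ", catalog @" << catalog_start
                << ", dex register maps @" << dex_register_maps_start
                << ", inline infos @" << inline_infos_start
                << ", total " << needed_size << " bytes\n";
      return 0;
    }
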
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index ef94a30..d9b59aa 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1407,9 +1407,10 @@
     return nullptr;
   }
   Handle<mirror::Array> fields;
-  if (image_pointer_size_ == 8) {
+  if (image_pointer_size_ == 8u) {
     fields = hs.NewHandle<mirror::Array>(mirror::LongArray::Alloc(self, dex_file.NumFieldIds()));
   } else {
+    DCHECK_EQ(image_pointer_size_, 4u);
     fields = hs.NewHandle<mirror::Array>(mirror::IntArray::Alloc(self, dex_file.NumFieldIds()));
   }
   if (fields.Get() == nullptr) {
@@ -5670,7 +5671,7 @@
   ArtField* const parent_field =
       mirror::Class::FindField(self, hs.NewHandle(h_path_class_loader->GetClass()), "parent",
                                "Ljava/lang/ClassLoader;");
-  DCHECK(parent_field!= nullptr);
+  DCHECK(parent_field != nullptr);
   mirror::Object* boot_cl =
       soa.Decode<mirror::Class*>(WellKnownClasses::java_lang_BootClassLoader)->AllocObject(self);
   parent_field->SetObject<false>(h_path_class_loader.Get(), boot_cl);
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 0eb7f2b..ef1aa6a 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -2806,7 +2806,27 @@
   JDWP::EventLocation location;
   SetEventLocation(&location, m, dex_pc);
 
+  // We need to be sure no exception is pending when calling JdwpState::PostLocationEvent.
+  // This is required to be able to call JNI functions to create JDWP ids. To achieve this,
+  // we temporarily clear the current thread's exception (if any) and restore it after
+  // the call.
+  // Note: the only way to get a pending exception here is to suspend on a move-exception
+  // instruction.
+  Thread* const self = Thread::Current();
+  StackHandleScope<1> hs(self);
+  Handle<mirror::Throwable> pending_exception(hs.NewHandle(self->GetException()));
+  self->ClearException();
+  if (kIsDebugBuild && pending_exception.Get() != nullptr) {
+    const DexFile::CodeItem* code_item = location.method->GetCodeItem();
+    const Instruction* instr = Instruction::At(&code_item->insns_[location.dex_pc]);
+    CHECK_EQ(Instruction::MOVE_EXCEPTION, instr->Opcode());
+  }
+
   gJdwpState->PostLocationEvent(&location, this_object, event_flags, return_value);
+
+  if (pending_exception.Get() != nullptr) {
+    self->SetException(pending_exception.Get());
+  }
 }
 
 void Dbg::PostFieldAccessEvent(mirror::ArtMethod* m, int dex_pc,
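
The debugger.cc hunk stashes the pending exception in a Handle (so a moving GC can still relocate the Throwable), clears it so the JDWP code can call JNI, and restores it afterwards. A standalone RAII sketch of the same save/clear/restore pattern (StubThread and StubThrowable are hypothetical; the real code does this inline with a StackHandleScope):

    struct StubThrowable {};

    struct StubThread {
      StubThrowable* pending = nullptr;
      StubThrowable* GetException() { return pending; }
      void ClearException() { pending = nullptr; }
      void SetException(StubThrowable* e) { pending = e; }
    };

    // Clears the thread's pending exception for the lifetime of the scope and
    // restores it on exit, so code inside the scope runs with none pending.
    class ScopedClearedException {
     public:
      explicit ScopedClearedException(StubThread* self)
          : self_(self), saved_(self->GetException()) {
        self_->ClearException();
      }
      ~ScopedClearedException() {
        if (saved_ != nullptr) {
          self_->SetException(saved_);
        }
      }
     private:
      StubThread* const self_;
      StubThrowable* const saved_;
    };
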
diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h
index 1cb437e..bfb9eb5 100644
--- a/runtime/mirror/dex_cache-inl.h
+++ b/runtime/mirror/dex_cache-inl.h
@@ -51,7 +51,7 @@
 
 inline ArtField* DexCache::GetResolvedField(uint32_t idx, size_t ptr_size) {
   ArtField* field = nullptr;
-  if (ptr_size == 8) {
+  if (ptr_size == 8u) {
     field = reinterpret_cast<ArtField*>(
         static_cast<uintptr_t>(GetResolvedFields()->AsLongArray()->GetWithoutChecks(idx)));
   } else {
@@ -66,7 +66,7 @@
 }
 
 inline void DexCache::SetResolvedField(uint32_t idx, ArtField* field, size_t ptr_size) {
-  if (ptr_size == 8) {
+  if (ptr_size == 8u) {
     GetResolvedFields()->AsLongArray()->Set(
         idx, static_cast<uint64_t>(reinterpret_cast<uintptr_t>(field)));
   } else {
diff --git a/runtime/runtime_linux.cc b/runtime/runtime_linux.cc
index d65e18e..f0b3c4e 100644
--- a/runtime/runtime_linux.cc
+++ b/runtime/runtime_linux.cc
@@ -340,6 +340,9 @@
                       << "Thread: " << tid << " \"" << thread_name << "\"\n"
                       << "Registers:\n" << Dumpable<UContext>(thread_context) << "\n"
                       << "Backtrace:\n" << Dumpable<Backtrace>(thread_backtrace);
+  if (kIsDebugBuild && signal_number == SIGSEGV) {
+    PrintFileToLog("/proc/self/maps", LogSeverity::INTERNAL_FATAL);
+  }
   Runtime* runtime = Runtime::Current();
   if (runtime != nullptr) {
     if (IsTimeoutSignal(signal_number)) {
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index f710460..b425a46 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -923,10 +923,6 @@
         + (NumberOfBytesForDexRegisterMap() * sizeof(uint8_t));
   }
 
-  uint32_t GetDexRegisterLocationCatalogOffset() const {
-    return kFixedSize;
-  }
-
   DexRegisterLocationCatalog GetDexRegisterLocationCatalog() const {
     return DexRegisterLocationCatalog(region_.Subregion(
         GetDexRegisterLocationCatalogOffset(),
@@ -991,14 +987,18 @@
     return StackMapSize() * GetNumberOfStackMaps();
   }
 
-  size_t GetDexRegisterMapsOffset() const {
+  uint32_t GetDexRegisterLocationCatalogOffset() const {
     return GetStackMapsOffset() + GetStackMapsSize();
   }
 
-  uint32_t GetStackMapsOffset() const {
+  size_t GetDexRegisterMapsOffset() const {
     return GetDexRegisterLocationCatalogOffset() + GetDexRegisterLocationCatalogSize();
   }
 
+  uint32_t GetStackMapsOffset() const {
+    return kFixedSize;
+  }
+
   DexRegisterMap GetDexRegisterMapOf(StackMap stack_map, uint32_t number_of_dex_registers) const {
     DCHECK(stack_map.HasDexRegisterMap(*this));
     uint32_t offset = GetDexRegisterMapsOffset() + stack_map.GetDexRegisterMapOffset(*this);
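
Taken together with the stack_map_stream.cc change, the CodeInfo region now reads, from low to high offsets (sketch, not to scale):

    +--------------------------------+  offset 0
    | fixed header (kFixedSize)      |
    +--------------------------------+  GetStackMapsOffset()
    | stack maps                     |
    +--------------------------------+  GetDexRegisterLocationCatalogOffset()
    | dex register location catalog  |
    +--------------------------------+  GetDexRegisterMapsOffset()
    | dex register maps              |
    +--------------------------------+  end of dex register maps
    | inline infos                   |
    +--------------------------------+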