Merge "Clean up some includes."
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index 08b56d0..c09241a 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -535,6 +535,7 @@
 	  (adb shell "$(GCOV_ENV) LD_LIBRARY_PATH=$(3) ANDROID_ROOT=$(ART_GTEST_TARGET_ANDROID_ROOT) \
 	    valgrind --leak-check=full --error-exitcode=1 --workaround-gcc296-bugs=yes \
 	    --suppressions=$(ART_TARGET_TEST_DIR)/valgrind-target-suppressions.txt \
+	    --num-callers=50 \
 	    $(ART_TARGET_NATIVETEST_DIR)/$(TARGET_$(2)ARCH)/$(1) && touch $(ART_TARGET_TEST_DIR)/$(TARGET_$(2)ARCH)/$$@-$$$$PPID" \
 	  && (adb pull $(ART_TARGET_TEST_DIR)/$(TARGET_$(2)ARCH)/$$@-$$$$PPID /tmp/ \
 	      && $$(call ART_TEST_PASSED,$$@)) \
@@ -591,7 +592,8 @@
 	$(hide) $$(call ART_TEST_SKIP,$$@) && \
 	  VALGRIND_LIB=$(HOST_OUT)/lib64/valgrind \
 	  $(HOST_OUT_EXECUTABLES)/valgrind --leak-check=full --error-exitcode=1 \
-	    --suppressions=art/test/valgrind-suppressions.txt $$< && \
+	    --suppressions=art/test/valgrind-suppressions.txt --num-callers=50 \
+	    $$< && \
 	    $$(call ART_TEST_PASSED,$$@) || $$(call ART_TEST_FAILED,$$@)
 
   ART_TEST_HOST_VALGRIND_GTEST$$($(2)ART_PHONY_TEST_HOST_SUFFIX)_RULES += valgrind-$$(gtest_rule)
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index 39f01d8..b5bc2fb 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -2289,7 +2289,7 @@
                                                 &dex_file,
                                                 dex_cache,
                                                 class_loader,
-                                                &class_def,
+                                                class_def,
                                                 Runtime::Current()->GetCompilerCallbacks(),
                                                 true /* allow soft failures */,
                                                 log_level_,
diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc
index 2095608..c9c5d24 100644
--- a/compiler/oat_writer.cc
+++ b/compiler/oat_writer.cc
@@ -226,7 +226,6 @@
     return dex_file_location_data_;
   }
 
-  void ReserveTypeLookupTable(OatWriter* oat_writer);
   void ReserveClassOffsets(OatWriter* oat_writer);
 
   size_t SizeOf() const;
@@ -436,36 +435,35 @@
                                 instruction_set_features,
                                 dchecked_integral_cast<uint32_t>(oat_dex_files_.size()),
                                 key_value_store);
-  offset = InitOatDexFiles(offset);
-  size_ = offset;
+  size_ = InitOatDexFiles(offset);
 
   std::unique_ptr<MemMap> dex_files_map;
   std::vector<std::unique_ptr<const DexFile>> dex_files;
-  if (!WriteDexFiles(rodata, file)) {
+  if (!WriteDexFiles(rodata, file) ||
+      !OpenDexFiles(file, verify, &dex_files_map, &dex_files)) {
     return false;
   }
-  // Reserve space for type lookup tables and update type_lookup_table_offset_.
-  for (OatDexFile& oat_dex_file : oat_dex_files_) {
-    oat_dex_file.ReserveTypeLookupTable(this);
+
+  // Do a bulk checksum update for Dex[]. Doing it piece by piece would be
+  // difficult because we're not using the OutputStream directly.
+  if (!oat_dex_files_.empty()) {
+    size_t size = size_ - oat_dex_files_[0].dex_file_offset_;
+    oat_header_->UpdateChecksum(dex_files_map->Begin(), size);
   }
-  size_t size_after_type_lookup_tables = size_;
+
+  ChecksumUpdatingOutputStream checksum_updating_rodata(rodata, oat_header_.get());
+
+  if (!WriteTypeLookupTables(&checksum_updating_rodata, dex_files)) {
+    return false;
+  }
+
   // Reserve space for class offsets and update class_offsets_offset_.
   for (OatDexFile& oat_dex_file : oat_dex_files_) {
     oat_dex_file.ReserveClassOffsets(this);
   }
-  ChecksumUpdatingOutputStream checksum_updating_rodata(rodata, oat_header_.get());
-  if (!WriteOatDexFiles(&checksum_updating_rodata) ||
-      !ExtendForTypeLookupTables(rodata, file, size_after_type_lookup_tables) ||
-      !OpenDexFiles(file, verify, &dex_files_map, &dex_files) ||
-      !WriteTypeLookupTables(dex_files_map.get(), dex_files)) {
-    return false;
-  }
 
-  // Do a bulk checksum update for Dex[] and TypeLookupTable[]. Doing it piece by
-  // piece would be difficult because we're not using the OutpuStream directly.
-  if (!oat_dex_files_.empty()) {
-    size_t size = size_after_type_lookup_tables - oat_dex_files_[0].dex_file_offset_;
-    oat_header_->UpdateChecksum(dex_files_map->Begin(), size);
+  if (!WriteOatDexFiles(&checksum_updating_rodata)) {
+    return false;
   }
 
   *opened_dex_files_map = std::move(dex_files_map);
@@ -1525,7 +1523,7 @@
 
   off_t tables_end_offset = out->Seek(0, kSeekCurrent);
   if (tables_end_offset == static_cast<off_t>(-1)) {
-    LOG(ERROR) << "Failed to seek to oat code position in " << out->GetLocation();
+    LOG(ERROR) << "Failed to get oat code position in " << out->GetLocation();
     return false;
   }
   size_t file_offset = oat_data_offset_;
@@ -2094,6 +2092,12 @@
 bool OatWriter::WriteOatDexFiles(OutputStream* rodata) {
   TimingLogger::ScopedTiming split("WriteOatDexFiles", timings_);
 
+  off_t initial_offset = rodata->Seek(0, kSeekCurrent);
+  if (initial_offset == static_cast<off_t>(-1)) {
+    LOG(ERROR) << "Failed to get current position in " << rodata->GetLocation();
+    return false;
+  }
+
   // Seek to the start of OatDexFiles, i.e. to the end of the OatHeader.  If there are
   // no OatDexFiles, no data is actually written to .rodata before WriteHeader() and
   // this Seek() ensures that we reserve the space for OatHeader in .rodata.
@@ -2119,30 +2123,13 @@
     }
   }
 
-  return true;
-}
+  // Seek back to the initial position.
+  if (rodata->Seek(initial_offset, kSeekSet) != initial_offset) {
+    PLOG(ERROR) << "Failed to seek to initial position. Actual: " << actual_offset
+                << " Expected: " << initial_offset << " File: " << rodata->GetLocation();
+    return false;
+  }
 
-bool OatWriter::ExtendForTypeLookupTables(OutputStream* rodata, File* file, size_t offset) {
-  TimingLogger::ScopedTiming split("ExtendForTypeLookupTables", timings_);
-
-  int64_t new_length = oat_data_offset_ + dchecked_integral_cast<int64_t>(offset);
-  if (file->SetLength(new_length) != 0) {
-    PLOG(ERROR) << "Failed to extend file for type lookup tables. new_length: " << new_length
-        << "File: " << file->GetPath();
-    return false;
-  }
-  off_t actual_offset = rodata->Seek(new_length, kSeekSet);
-  if (actual_offset != static_cast<off_t>(new_length)) {
-    PLOG(ERROR) << "Failed to seek stream after extending file for type lookup tables."
-                << " Actual: " << actual_offset << " Expected: " << new_length
-                << " File: " << rodata->GetLocation();
-    return false;
-  }
-  if (!rodata->Flush()) {
-    PLOG(ERROR) << "Failed to flush stream after extending for type lookup tables."
-                << " File: " << rodata->GetLocation();
-    return false;
-  }
   return true;
 }
 
@@ -2223,26 +2210,66 @@
 }
 
 bool OatWriter::WriteTypeLookupTables(
-    MemMap* opened_dex_files_map,
+    OutputStream* rodata,
     const std::vector<std::unique_ptr<const DexFile>>& opened_dex_files) {
   TimingLogger::ScopedTiming split("WriteTypeLookupTables", timings_);
 
   DCHECK_EQ(opened_dex_files.size(), oat_dex_files_.size());
   for (size_t i = 0, size = opened_dex_files.size(); i != size; ++i) {
     OatDexFile* oat_dex_file = &oat_dex_files_[i];
-    if (oat_dex_file->lookup_table_offset_ != 0u) {
-      DCHECK(oat_dex_file->create_type_lookup_table_ == CreateTypeLookupTable::kCreate);
-      DCHECK_NE(oat_dex_file->class_offsets_.size(), 0u);
-      size_t map_offset = oat_dex_files_[0].dex_file_offset_;
-      size_t lookup_table_offset = oat_dex_file->lookup_table_offset_;
-      uint8_t* lookup_table = opened_dex_files_map->Begin() + (lookup_table_offset - map_offset);
-      opened_dex_files[i]->CreateTypeLookupTable(lookup_table);
+    DCHECK_EQ(oat_dex_file->lookup_table_offset_, 0u);
+
+    if (oat_dex_file->create_type_lookup_table_ != CreateTypeLookupTable::kCreate ||
+        oat_dex_file->class_offsets_.empty()) {
+      continue;
     }
+
+    size_t table_size = TypeLookupTable::RawDataLength(oat_dex_file->class_offsets_.size());
+    if (table_size == 0u) {
+      continue;
+    }
+
+    // Create the lookup table. When `nullptr` is given as the storage buffer,
+    // TypeLookupTable allocates its own and DexFile takes ownership.
+    opened_dex_files[i]->CreateTypeLookupTable(/* storage */ nullptr);
+    TypeLookupTable* table = opened_dex_files[i]->GetTypeLookupTable();
+
+    // Type tables are required to be 4 byte aligned.
+    size_t original_offset = size_;
+    size_t rodata_offset = RoundUp(original_offset, 4);
+    size_t padding_size = rodata_offset - original_offset;
+
+    if (padding_size != 0u) {
+      std::vector<uint8_t> buffer(padding_size, 0u);
+      if (!rodata->WriteFully(buffer.data(), padding_size)) {
+        PLOG(ERROR) << "Failed to write lookup table alignment padding."
+                    << " File: " << oat_dex_file->GetLocation()
+                    << " Output: " << rodata->GetLocation();
+        return false;
+      }
+    }
+
+    DCHECK_EQ(oat_data_offset_ + rodata_offset,
+              static_cast<size_t>(rodata->Seek(0u, kSeekCurrent)));
+    DCHECK_EQ(table_size, table->RawDataLength());
+
+    if (!rodata->WriteFully(table->RawData(), table_size)) {
+      PLOG(ERROR) << "Failed to write lookup table."
+                  << " File: " << oat_dex_file->GetLocation()
+                  << " Output: " << rodata->GetLocation();
+      return false;
+    }
+
+    oat_dex_file->lookup_table_offset_ = rodata_offset;
+
+    size_ += padding_size + table_size;
+    size_oat_lookup_table_ += table_size;
+    size_oat_lookup_table_alignment_ += padding_size;
   }
 
-  DCHECK_EQ(opened_dex_files_map == nullptr, opened_dex_files.empty());
-  if (opened_dex_files_map != nullptr && !opened_dex_files_map->Sync()) {
-    PLOG(ERROR) << "Failed to Sync() type lookup tables. Map: " << opened_dex_files_map->GetName();
+  if (!rodata->Flush()) {
+    PLOG(ERROR) << "Failed to flush stream after writing type lookup tables."
+                << " File: " << rodata->GetLocation();
     return false;
   }
 
@@ -2298,22 +2325,6 @@
           + sizeof(lookup_table_offset_);
 }
 
-void OatWriter::OatDexFile::ReserveTypeLookupTable(OatWriter* oat_writer) {
-  DCHECK_EQ(lookup_table_offset_, 0u);
-  if (create_type_lookup_table_ == CreateTypeLookupTable::kCreate && !class_offsets_.empty()) {
-    size_t table_size = TypeLookupTable::RawDataLength(class_offsets_.size());
-    if (table_size != 0u) {
-      // Type tables are required to be 4 byte aligned.
-      size_t original_offset = oat_writer->size_;
-      size_t offset = RoundUp(original_offset, 4);
-      oat_writer->size_oat_lookup_table_alignment_ += offset - original_offset;
-      lookup_table_offset_ = offset;
-      oat_writer->size_ = offset + table_size;
-      oat_writer->size_oat_lookup_table_ += table_size;
-    }
-  }
-}
-
 void OatWriter::OatDexFile::ReserveClassOffsets(OatWriter* oat_writer) {
   DCHECK_EQ(class_offsets_offset_, 0u);
   if (!class_offsets_.empty()) {
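
The rewritten WriteTypeLookupTables streams each table straight into the .rodata section instead of patching a memory map after the fact: the running size is rounded up to a 4-byte boundary, zero padding is written, the table follows, and both padding and table size feed the statistics counters. A minimal standalone sketch of that bookkeeping, with illustrative sizes standing in for the writer's running size_ and TypeLookupTable::RawDataLength:

    #include <cstdint>
    #include <cstdio>

    int main() {
      uint32_t size = 0x1a23;                       // running size of the .rodata contents so far
      uint32_t table_size = 0x200;                  // TypeLookupTable::RawDataLength(class_count)
      uint32_t rodata_offset = (size + 3u) & ~3u;   // RoundUp(size, 4): tables must be 4-byte aligned
      uint32_t padding = rodata_offset - size;      // zero bytes written before the table
      uint32_t new_size = rodata_offset + table_size;
      // lookup_table_offset_ records rodata_offset; padding and table_size are added to the
      // size_oat_lookup_table_alignment_ and size_oat_lookup_table_ counters respectively.
      std::printf("offset=%#x padding=%u new size=%#x\n", rodata_offset, padding, new_size);
      return 0;
    }
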
diff --git a/compiler/oat_writer.h b/compiler/oat_writer.h
index decb7db..93e2e44 100644
--- a/compiler/oat_writer.h
+++ b/compiler/oat_writer.h
@@ -262,12 +262,11 @@
   bool WriteDexFile(OutputStream* rodata, File* file, OatDexFile* oat_dex_file, File* dex_file);
   bool WriteDexFile(OutputStream* rodata, OatDexFile* oat_dex_file, const uint8_t* dex_file);
   bool WriteOatDexFiles(OutputStream* rodata);
-  bool ExtendForTypeLookupTables(OutputStream* rodata, File* file, size_t offset);
   bool OpenDexFiles(File* file,
                     bool verify,
                     /*out*/ std::unique_ptr<MemMap>* opened_dex_files_map,
                     /*out*/ std::vector<std::unique_ptr<const DexFile>>* opened_dex_files);
-  bool WriteTypeLookupTables(MemMap* opened_dex_files_map,
+  bool WriteTypeLookupTables(OutputStream* rodata,
                              const std::vector<std::unique_ptr<const DexFile>>& opened_dex_files);
   bool WriteCodeAlignment(OutputStream* out, uint32_t aligned_code_delta);
   void SetMultiOatRelativePatcherAdjustment();
diff --git a/compiler/optimizing/ssa_liveness_analysis.cc b/compiler/optimizing/ssa_liveness_analysis.cc
index a01e107..a4d52d7 100644
--- a/compiler/optimizing/ssa_liveness_analysis.cc
+++ b/compiler/optimizing/ssa_liveness_analysis.cc
@@ -59,6 +59,38 @@
   worklist->insert(insert_pos.base(), block);
 }
 
+static bool IsLinearOrderWellFormed(const HGraph& graph) {
+  for (HBasicBlock* header : graph.GetBlocks()) {
+    if (header == nullptr || !header->IsLoopHeader()) {
+      continue;
+    }
+
+    HLoopInformation* loop = header->GetLoopInformation();
+    size_t num_blocks = loop->GetBlocks().NumSetBits();
+    size_t found_blocks = 0u;
+
+    for (HLinearOrderIterator it(graph); !it.Done(); it.Advance()) {
+      HBasicBlock* current = it.Current();
+      if (loop->Contains(*current)) {
+        found_blocks++;
+        if (found_blocks == 1u && current != header) {
+          // First block is not the header.
+          return false;
+        } else if (found_blocks == num_blocks && !loop->IsBackEdge(*current)) {
+          // Last block is not a back edge.
+          return false;
+        }
+      } else if (found_blocks != 0u && found_blocks != num_blocks) {
+        // Blocks are not adjacent.
+        return false;
+      }
+    }
+    DCHECK_EQ(found_blocks, num_blocks);
+  }
+
+  return true;
+}
+
 void SsaLivenessAnalysis::LinearizeGraph() {
   // Create a reverse post ordering with the following properties:
   // - Blocks in a loop are consecutive,
@@ -100,6 +132,8 @@
       forward_predecessors[block_id] = number_of_remaining_predecessors - 1;
     }
   } while (!worklist.empty());
+
+  DCHECK(graph_->HasIrreducibleLoops() || IsLinearOrderWellFormed(*graph_));
 }
 
 void SsaLivenessAnalysis::NumberInstructions() {
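
IsLinearOrderWellFormed encodes the property that the linearization keeps every natural loop contiguous: the loop's blocks form one unbroken run that begins with the header and ends at a back edge. A simplified standalone sketch of the same contiguity check over plain block ids, assuming a single back edge (the real code asks HLoopInformation::IsBackEdge instead):

    #include <cassert>
    #include <cstddef>
    #include <unordered_set>
    #include <vector>

    // `order` is the linearized block ids, `loop` the ids belonging to one loop.
    bool LoopIsContiguous(const std::vector<int>& order,
                          const std::unordered_set<int>& loop,
                          int header, int back_edge) {
      size_t found = 0;
      for (int block : order) {
        if (loop.count(block) != 0) {
          ++found;
          if (found == 1 && block != header) return false;               // first block is not the header
          if (found == loop.size() && block != back_edge) return false;  // last block is not a back edge
        } else if (found != 0 && found != loop.size()) {
          return false;                                                  // blocks are not adjacent
        }
      }
      return found == loop.size();
    }

    int main() {
      assert(LoopIsContiguous({0, 1, 2, 3, 4}, {1, 2, 3}, 1, 3));   // header first, back edge last
      assert(!LoopIsContiguous({0, 1, 4, 2, 3}, {1, 2, 3}, 1, 3));  // loop blocks interrupted by block 4
      return 0;
    }
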
diff --git a/compiler/optimizing/ssa_liveness_analysis.h b/compiler/optimizing/ssa_liveness_analysis.h
index 92788fe..9f94c83 100644
--- a/compiler/optimizing/ssa_liveness_analysis.h
+++ b/compiler/optimizing/ssa_liveness_analysis.h
@@ -983,38 +983,6 @@
     return false;
   }
 
-  bool IsLinearOrderWellFormed(const HGraph& graph) {
-    for (HBasicBlock* header : graph.GetBlocks()) {
-      if (header == nullptr || !header->IsLoopHeader()) {
-        continue;
-      }
-
-      HLoopInformation* loop = header->GetLoopInformation();
-      size_t num_blocks = loop->GetBlocks().NumSetBits();
-      size_t found_blocks = 0u;
-
-      for (HLinearOrderIterator it(graph); !it.Done(); it.Advance()) {
-        HBasicBlock* current = it.Current();
-        if (loop->Contains(*current)) {
-          found_blocks++;
-          if (found_blocks == 1u && current != header) {
-            // First block is not the header.
-            return false;
-          } else if (found_blocks == num_blocks && !loop->IsBackEdge(*current)) {
-            // Last block is not a back edge.
-            return false;
-          }
-        } else if (found_blocks != 0u && found_blocks != num_blocks) {
-          // Blocks are not adjacent.
-          return false;
-        }
-      }
-      DCHECK_EQ(found_blocks, num_blocks);
-    }
-
-    return true;
-  }
-
   void AddBackEdgeUses(const HBasicBlock& block_at_use) {
     DCHECK(block_at_use.IsInLoop());
     if (block_at_use.GetGraph()->HasIrreducibleLoops()) {
@@ -1024,8 +992,6 @@
       return;
     }
 
-    DCHECK(IsLinearOrderWellFormed(*block_at_use.GetGraph()));
-
     // Add synthesized uses at the back edge of loops to help the register allocator.
     // Note that this method is called in decreasing liveness order, to faciliate adding
     // uses at the head of the `first_use_` linked list. Because below
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 36421ff..c87a18b 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1268,7 +1268,7 @@
       DCHECK(options_.class_loader_ != nullptr);
       return verifier::MethodVerifier::VerifyMethodAndDump(
           soa.Self(), vios, dex_method_idx, dex_file, dex_cache, *options_.class_loader_,
-          &class_def, code_item, nullptr, method_access_flags);
+          class_def, code_item, nullptr, method_access_flags);
     }
 
     return nullptr;
diff --git a/runtime/Android.bp b/runtime/Android.bp
index c4c7384..22d79cb 100644
--- a/runtime/Android.bp
+++ b/runtime/Android.bp
@@ -135,6 +135,7 @@
         "native_bridge_art_interface.cc",
         "native_stack_dump.cc",
         "native/dalvik_system_DexFile.cc",
+        "native/dalvik_system_InMemoryDexClassLoader_DexData.cc",
         "native/dalvik_system_VMDebug.cc",
         "native/dalvik_system_VMRuntime.cc",
         "native/dalvik_system_VMStack.cc",
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index bc4c999..c51c336 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1086,25 +1086,6 @@
                                                               // Load the class (r2)
     ldr    r2, [r2, r0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
     cbz    r2, .Lart_quick_alloc_object_rosalloc_slow_path    // Check null class
-                                                              // Check class status.
-    ldr    r3, [r2, #MIRROR_CLASS_STATUS_OFFSET]
-    cmp    r3, #MIRROR_CLASS_STATUS_INITIALIZED
-    bne    .Lart_quick_alloc_object_rosalloc_slow_path
-                                                              // Add a fake dependence from the
-                                                              // following access flag and size
-                                                              // loads to the status load.
-                                                              // This is to prevent those loads
-                                                              // from being reordered above the
-                                                              // status load and reading wrong
-                                                              // values (an alternative is to use
-                                                              // a load-acquire for the status).
-    eor    r3, r3, r3
-    add    r2, r2, r3
-                                                              // Check access flags has
-                                                              // kAccClassIsFinalizable
-    ldr    r3, [r2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
-    tst    r3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE
-    bne    .Lart_quick_alloc_object_rosalloc_slow_path
 
     ldr    r3, [r9, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]     // Check if the thread local
                                                               // allocation stack has room.
@@ -1113,22 +1094,21 @@
     cmp    r3, r12
     bhs    .Lart_quick_alloc_object_rosalloc_slow_path
 
-    ldr    r3, [r2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET]         // Load the object size (r3)
+    ldr    r3, [r2, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (r3)
     cmp    r3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE        // Check if the size is for a thread
-                                                              // local allocation
+                                                              // local allocation. Also does the
+                                                              // initialized and finalizable checks.
     bhs    .Lart_quick_alloc_object_rosalloc_slow_path
                                                               // Compute the rosalloc bracket index
-                                                              // from the size.
-                                                              // Align up the size by the rosalloc
-                                                              // bracket quantum size and divide
-                                                              // by the quantum size and subtract
-                                                              // by 1. This code is a shorter but
-                                                              // equivalent version.
-    sub    r3, r3, #1
-    lsr    r3, r3, #ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT
+                                                              // from the size. Since the size is
+                                                              // already aligned we can combine the
+                                                              // two shifts together.
+    add    r12, r9, r3, lsr #(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)
+                                                              // Subtract pointer size since there
+                                                              // are no runs for 0 byte allocations
+                                                              // and the size is already aligned.
                                                               // Load the rosalloc run (r12)
-    add    r12, r9, r3, lsl #POINTER_SIZE_SHIFT
-    ldr    r12, [r12, #THREAD_ROSALLOC_RUNS_OFFSET]
+    ldr    r12, [r12, #(THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)]
                                                               // Load the free list head (r3). This
                                                               // will be the return val.
     ldr    r3, [r12, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
@@ -1153,7 +1133,7 @@
                                                               // to later accesses to the class
                                                               // object. Alternatively we could use
                                                               // "ishst" if we use load-acquire for
-                                                              // the class status load.)
+                                                              // the object size load.
                                                               // Needs to be done before pushing on
                                                               // allocation since Heap::VisitObjects
                                                               // relies on seeing the class pointer.
@@ -1200,9 +1180,7 @@
     ldrd   r12, r3, [r9, #THREAD_LOCAL_POS_OFFSET]
     sub    r12, r3, r12                                       // Compute the remaining buf size.
     ldr    r3, [r2, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (r3).
-    cmp    r3, r12                                            // Check if it fits. OK to do this
-                                                              // before rounding up the object size
-                                                              // assuming the buf size alignment.
+    cmp    r3, r12                                            // Check if it fits.
     bhi    \slowPathLabel
     // "Point of no slow path". Won't go to the slow path from here on. OK to clobber r0 and r1.
                                                               // Reload old thread_local_pos (r0)
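
The new fast path folds the old "subtract 1, shift right by the bracket quantum, shift left by the pointer size" sequence into a single shift plus a negative displacement, which only works because the size loaded from the class is already aligned to the bracket quantum. A quick standalone check of that identity, with illustrative constants standing in for the generated ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT and POINTER_SIZE_SHIFT values:

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kQuantumShift = 4;  // stand-in for ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT (16-byte quantum)
    constexpr uint32_t kPointerShift = 2;  // stand-in for POINTER_SIZE_SHIFT on a 32-bit target
    constexpr uint32_t kPointerSize = 1u << kPointerShift;

    int main() {
      for (uint32_t size = 1u << kQuantumShift; size <= 256u; size += 1u << kQuantumShift) {
        // Old sequence: bracket index = (size - 1) >> quantum, scaled up by the pointer size.
        uint32_t old_offset = ((size - 1u) >> kQuantumShift) << kPointerShift;
        // New sequence: one combined shift, minus one pointer because there is no
        // run for 0-byte allocations and the size is already quantum-aligned.
        uint32_t new_offset = (size >> (kQuantumShift - kPointerShift)) - kPointerSize;
        assert(old_offset == new_offset);
      }
      return 0;
    }

The same identity is what the x86 and x86_64 variants below express with a negative displacement on the scaled-index addressing mode.
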
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 82dac9c..03768af 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1860,47 +1860,27 @@
                                                               // Load the class (x2)
     ldr    w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
     cbz    x2, .Lart_quick_alloc_object_rosalloc_slow_path    // Check null class
-                                                              // Check class status.
-    ldr    w3, [x2, #MIRROR_CLASS_STATUS_OFFSET]
-    cmp    x3, #MIRROR_CLASS_STATUS_INITIALIZED
-    bne    .Lart_quick_alloc_object_rosalloc_slow_path
-                                                              // Add a fake dependence from the
-                                                              // following access flag and size
-                                                              // loads to the status load.
-                                                              // This is to prevent those loads
-                                                              // from being reordered above the
-                                                              // status load and reading wrong
-                                                              // values (an alternative is to use
-                                                              // a load-acquire for the status).
-    eor    x3, x3, x3
-    add    x2, x2, x3
-                                                              // Check access flags has
-                                                              // kAccClassIsFinalizable
-    ldr    w3, [x2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
-    tst    x3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE
-    bne    .Lart_quick_alloc_object_rosalloc_slow_path
     ldr    x3, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]  // Check if the thread local
                                                               // allocation stack has room.
                                                               // ldp won't work due to large offset.
     ldr    x4, [xSELF, #THREAD_LOCAL_ALLOC_STACK_END_OFFSET]
     cmp    x3, x4
     bhs    .Lart_quick_alloc_object_rosalloc_slow_path
-    ldr    w3, [x2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET]         // Load the object size (x3)
+    ldr    w3, [x2, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (x3)
     cmp    x3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE        // Check if the size is for a thread
-                                                              // local allocation
+                                                              // local allocation. Also does the
+                                                              // finalizable and initialization
+                                                              // checks.
     bhs    .Lart_quick_alloc_object_rosalloc_slow_path
                                                               // Compute the rosalloc bracket index
-                                                              // from the size.
-                                                              // Align up the size by the rosalloc
-                                                              // bracket quantum size and divide
-                                                              // by the quantum size and subtract
-                                                              // by 1. This code is a shorter but
-                                                              // equivalent version.
-    sub    x3, x3, #1
-    lsr    x3, x3, #ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT
-                                                              // Load the rosalloc run (x4)
-    add    x4, xSELF, x3, lsl #POINTER_SIZE_SHIFT
-    ldr    x4, [x4, #THREAD_ROSALLOC_RUNS_OFFSET]
+                                                              // from the size. Since the size is
+                                                              // already aligned we can combine the
+                                                              // two shifts together.
+    add    x4, xSELF, x3, lsr #(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)
+                                                              // Subtract pointer size since there
+                                                              // are no runs for 0 byte allocations
+                                                              // and the size is already aligned.
+    ldr    x4, [x4, #(THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)]
                                                               // Load the free list head (x3). This
                                                               // will be the return val.
     ldr    x3, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
@@ -1921,11 +1901,11 @@
     str    w2, [x3, #MIRROR_OBJECT_CLASS_OFFSET]
                                                               // Fence. This is "ish" not "ishst" so
                                                               // that it also ensures ordering of
-                                                              // the class status load with respect
+                                                              // the object size load with respect
                                                               // to later accesses to the class
                                                               // object. Alternatively we could use
                                                               // "ishst" if we use load-acquire for
-                                                              // the class status load.)
+                                                              // the object size load.
                                                               // Needs to be done before pushing on
                                                               // allocation since Heap::VisitObjects
                                                               // relies on seeing the class pointer.
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 9c22245..67ebf50 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -897,17 +897,6 @@
     movl 0(%edx, %eax, COMPRESSED_REFERENCE_SIZE), %edx
     testl %edx, %edx                                    // Check null class
     jz   .Lart_quick_alloc_object_rosalloc_slow_path
-                                                        // Check class status
-    cmpl LITERAL(MIRROR_CLASS_STATUS_INITIALIZED), MIRROR_CLASS_STATUS_OFFSET(%edx)
-    jne  .Lart_quick_alloc_object_rosalloc_slow_path
-                                                        // No fake dependence needed on x86
-                                                        // between status and flags load,
-                                                        // since each load is a load-acquire,
-                                                        // no loads reordering.
-                                                        // Check access flags has
-                                                        // kAccClassIsFinalizable
-    testl LITERAL(ACCESS_FLAGS_CLASS_IS_FINALIZABLE), MIRROR_CLASS_ACCESS_FLAGS_OFFSET(%edx)
-    jnz   .Lart_quick_alloc_object_rosalloc_slow_path
 
     movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
                                                         // Check if the thread local allocation
@@ -916,21 +905,19 @@
     cmpl THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%ebx), %edi
     jae  .Lart_quick_alloc_object_rosalloc_slow_path
 
-    movl MIRROR_CLASS_OBJECT_SIZE_OFFSET(%edx), %edi    // Load the object size (edi)
+    movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%edx), %edi  // Load the object size (edi)
                                                         // Check if the size is for a thread
-                                                        // local allocation
+                                                        // local allocation. Also does the
+                                                        // finalizable and initialization check.
     cmpl LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %edi
     ja   .Lart_quick_alloc_object_rosalloc_slow_path
-    decl %edi
     shrl LITERAL(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), %edi // Calculate the rosalloc bracket index
                                                             // from object size.
-                                                            // Align up the size by the rosalloc
-                                                            // bracket quantum size and divide
-                                                            // by the quantum size and subtract
-                                                            // by 1. This code is a shorter but
-                                                            // equivalent version.
                                                         // Load thread local rosalloc run (ebx)
-    movl THREAD_ROSALLOC_RUNS_OFFSET(%ebx, %edi, __SIZEOF_POINTER__), %ebx
+                                                        // Subtract __SIZEOF_POINTER__ to subtract
+                                                        // one from edi as there is no 0 byte run
+                                                        // and the size is already aligned.
+    movl (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)(%ebx, %edi, __SIZEOF_POINTER__), %ebx
                                                         // Load free_list head (edi),
                                                         // this will be the return value.
     movl (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%ebx), %edi
@@ -990,17 +977,11 @@
 MACRO1(ALLOC_OBJECT_TLAB_FAST_PATH, slowPathLabel)
     testl %edx, %edx                                    // Check null class
     jz   VAR(slowPathLabel)
-                                                        // No fake dependence needed on x86
-                                                        // between status and flags load,
-                                                        // since each load is a load-acquire,
-                                                        // no loads reordering.
     movl %fs:THREAD_SELF_OFFSET, %ebx                   // ebx = thread
     movl THREAD_LOCAL_END_OFFSET(%ebx), %edi            // Load thread_local_end.
     subl THREAD_LOCAL_POS_OFFSET(%ebx), %edi            // Compute the remaining buffer size.
     movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%edx), %esi  // Load the object size.
-    cmpl %edi, %esi                                     // Check if it fits. OK to do this
-                                                        // before rounding up the object size
-                                                        // assuming the buf size alignment.
+    cmpl %edi, %esi                                     // Check if it fits.
     ja   VAR(slowPathLabel)
     movl THREAD_LOCAL_POS_OFFSET(%ebx), %eax            // Load thread_local_pos
                                                         // as allocated object.
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index c568715..b805703 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -935,17 +935,6 @@
     movl   0(%rdx, %rdi, COMPRESSED_REFERENCE_SIZE), %edx
     testl  %edx, %edx                                      // Check null class
     jz     .Lart_quick_alloc_object_rosalloc_slow_path
-                                                           // Check class status.
-    cmpl   LITERAL(MIRROR_CLASS_STATUS_INITIALIZED), MIRROR_CLASS_STATUS_OFFSET(%rdx)
-    jne    .Lart_quick_alloc_object_rosalloc_slow_path
-                                                           // We don't need a fence (between the
-                                                           // the status and the access flag
-                                                           // loads) here because every load is
-                                                           // a load acquire on x86.
-                                                           // Check access flags has
-                                                           // kAccClassIsFinalizable
-    testl  LITERAL(ACCESS_FLAGS_CLASS_IS_FINALIZABLE), MIRROR_CLASS_ACCESS_FLAGS_OFFSET(%rdx)
-    jnz    .Lart_quick_alloc_object_rosalloc_slow_path
                                                            // Check if the thread local
                                                            // allocation stack has room.
     movq   %gs:THREAD_SELF_OFFSET, %r8                     // r8 = thread
@@ -953,22 +942,21 @@
     cmpq   THREAD_LOCAL_ALLOC_STACK_END_OFFSET(%r8), %rcx
     jae    .Lart_quick_alloc_object_rosalloc_slow_path
                                                            // Load the object size
-    movl   MIRROR_CLASS_OBJECT_SIZE_OFFSET(%rdx), %eax
+    movl   MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%rdx), %eax
                                                            // Check if the size is for a thread
-                                                           // local allocation
+                                                           // local allocation. Also does the
+                                                           // initialized and finalizable checks.
     cmpl   LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %eax
     ja     .Lart_quick_alloc_object_rosalloc_slow_path
                                                            // Compute the rosalloc bracket index
                                                            // from the size.
-                                                           // Align up the size by the rosalloc
-                                                           // bracket quantum size and divide
-                                                           // by the quantum size and subtract
-                                                           // by 1. This code is a shorter but
-                                                           // equivalent version.
-    subq   LITERAL(1), %rax
     shrq   LITERAL(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT), %rax
                                                            // Load the rosalloc run (r9)
-    movq   THREAD_ROSALLOC_RUNS_OFFSET(%r8, %rax, __SIZEOF_POINTER__), %r9
+                                                           // Subtract __SIZEOF_POINTER__ to
+                                                           // subtract one from rax as there is no
+                                                           // 0 byte run and the size is already
+                                                           // aligned.
+    movq   (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)(%r8, %rax, __SIZEOF_POINTER__), %r9
                                                            // Load the free list head (rax). This
                                                            // will be the return val.
     movq   (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)(%r9), %rax
diff --git a/runtime/dex_file.cc b/runtime/dex_file.cc
index 24d73ef..ebadd79 100644
--- a/runtime/dex_file.cc
+++ b/runtime/dex_file.cc
@@ -228,6 +228,10 @@
                                                        nullptr,
                                                        oat_dex_file,
                                                        error_msg);
+  if (dex_file == nullptr) {
+    return nullptr;
+  }
+
   if (verify && !DexFileVerifier::Verify(dex_file.get(),
                                          dex_file->Begin(),
                                          dex_file->Size(),
@@ -236,7 +240,32 @@
                                          error_msg)) {
     return nullptr;
   }
+  return dex_file;
+}
 
+std::unique_ptr<const DexFile> DexFile::Open(const std::string& location,
+                                             uint32_t location_checksum,
+                                             std::unique_ptr<MemMap> mem_map,
+                                             bool verify,
+                                             bool verify_checksum,
+                                             std::string* error_msg) {
+  ScopedTrace trace(std::string("Open dex file from mapped-memory ") + location);
+  std::unique_ptr<const DexFile> dex_file = OpenMemory(location,
+                                                       location_checksum,
+                                                       std::move(mem_map),
+                                                       error_msg);
+  if (dex_file == nullptr) {
+    return nullptr;
+  }
+
+  if (verify && !DexFileVerifier::Verify(dex_file.get(),
+                                         dex_file->Begin(),
+                                         dex_file->Size(),
+                                         location.c_str(),
+                                         verify_checksum,
+                                         error_msg)) {
+    return nullptr;
+  }
   return dex_file;
 }
 
@@ -269,7 +298,7 @@
                               /*low_4gb*/false,
                               location,
                               error_msg));
-    if (map.get() == nullptr) {
+    if (map == nullptr) {
       DCHECK(!error_msg->empty());
       return nullptr;
     }
@@ -283,7 +312,9 @@
 
   const Header* dex_header = reinterpret_cast<const Header*>(map->Begin());
 
-  std::unique_ptr<const DexFile> dex_file(OpenMemory(location, dex_header->checksum_, map.release(),
+  std::unique_ptr<const DexFile> dex_file(OpenMemory(location,
+                                                     dex_header->checksum_,
+                                                     std::move(map),
                                                      error_msg));
   if (dex_file.get() == nullptr) {
     *error_msg = StringPrintf("Failed to open dex file '%s' from memory: %s", location,
@@ -320,13 +351,13 @@
 
 std::unique_ptr<const DexFile> DexFile::OpenMemory(const std::string& location,
                                                    uint32_t location_checksum,
-                                                   MemMap* mem_map,
+                                                   std::unique_ptr<MemMap> mem_map,
                                                    std::string* error_msg) {
   return OpenMemory(mem_map->Begin(),
                     mem_map->Size(),
                     location,
                     location_checksum,
-                    mem_map,
+                    std::move(mem_map),
                     nullptr,
                     error_msg);
 }
@@ -356,9 +387,11 @@
     *error_code = ZipOpenErrorCode::kExtractToMemoryError;
     return nullptr;
   }
-  std::unique_ptr<const DexFile> dex_file(OpenMemory(location, zip_entry->GetCrc32(), map.release(),
-                                               error_msg));
-  if (dex_file.get() == nullptr) {
+  std::unique_ptr<const DexFile> dex_file(OpenMemory(location,
+                                                     zip_entry->GetCrc32(),
+                                                     std::move(map),
+                                                     error_msg));
+  if (dex_file == nullptr) {
     *error_msg = StringPrintf("Failed to open dex file '%s' from memory: %s", location.c_str(),
                               error_msg->c_str());
     *error_code = ZipOpenErrorCode::kDexFileError;
@@ -443,14 +476,14 @@
                                                    size_t size,
                                                    const std::string& location,
                                                    uint32_t location_checksum,
-                                                   MemMap* mem_map,
+                                                   std::unique_ptr<MemMap> mem_map,
                                                    const OatDexFile* oat_dex_file,
                                                    std::string* error_msg) {
   DCHECK(base != nullptr);
   DCHECK_NE(size, 0U);
   CHECK_ALIGNED(base, 4);  // various dex file structures must be word aligned
   std::unique_ptr<DexFile> dex_file(
-      new DexFile(base, size, location, location_checksum, mem_map, oat_dex_file));
+      new DexFile(base, size, location, location_checksum, std::move(mem_map), oat_dex_file));
   if (!dex_file->Init(error_msg)) {
     dex_file.reset();
   }
@@ -460,13 +493,13 @@
 DexFile::DexFile(const uint8_t* base, size_t size,
                  const std::string& location,
                  uint32_t location_checksum,
-                 MemMap* mem_map,
+                 std::unique_ptr<MemMap> mem_map,
                  const OatDexFile* oat_dex_file)
     : begin_(base),
       size_(size),
       location_(location),
       location_checksum_(location_checksum),
-      mem_map_(mem_map),
+      mem_map_(std::move(mem_map)),
       header_(reinterpret_cast<const Header*>(base)),
       string_ids_(reinterpret_cast<const StringId*>(base + header_->string_ids_off_)),
       type_ids_(reinterpret_cast<const TypeId*>(base + header_->type_ids_off_)),
diff --git a/runtime/dex_file.h b/runtime/dex_file.h
index 56e5ecc..ebbde0a 100644
--- a/runtime/dex_file.h
+++ b/runtime/dex_file.h
@@ -415,10 +415,6 @@
                    std::string* error_msg,
                    std::vector<std::unique_ptr<const DexFile>>* dex_files);
 
-  // Checks whether the given file has the dex magic, or is a zip file with a classes.dex entry.
-  // If this function returns false, Open will not succeed. The inverse is not true, however.
-  static bool MaybeDex(const char* filename);
-
   // Opens .dex file, backed by existing memory
   static std::unique_ptr<const DexFile> Open(const uint8_t* base, size_t size,
                                              const std::string& location,
@@ -428,6 +424,18 @@
                                              bool verify_checksum,
                                              std::string* error_msg);
 
+  // Opens .dex file that has been memory-mapped by the caller.
+  static std::unique_ptr<const DexFile> Open(const std::string& location,
+                                             uint32_t location_checksum,
+                                             std::unique_ptr<MemMap> mem_map,
+                                             bool verify,
+                                             bool verify_checksum,
+                                             std::string* error_msg);
+
+  // Checks whether the given file has the dex magic, or is a zip file with a classes.dex entry.
+  // If this function returns false, Open will not succeed. The inverse is not true, however.
+  static bool MaybeDex(const char* filename);
+
   // Open all classesXXX.dex files from a zip archive.
   static bool OpenFromZip(const ZipArchive& zip_archive,
                           const std::string& location,
@@ -1164,7 +1172,7 @@
   // Opens a .dex file at the given address backed by a MemMap
   static std::unique_ptr<const DexFile> OpenMemory(const std::string& location,
                                                    uint32_t location_checksum,
-                                                   MemMap* mem_map,
+                                                   std::unique_ptr<MemMap> mem_map,
                                                    std::string* error_msg);
 
   // Opens a .dex file at the given address, optionally backed by a MemMap
@@ -1172,14 +1180,14 @@
                                                    size_t size,
                                                    const std::string& location,
                                                    uint32_t location_checksum,
-                                                   MemMap* mem_map,
+                                                   std::unique_ptr<MemMap> mem_map,
                                                    const OatDexFile* oat_dex_file,
                                                    std::string* error_msg);
 
   DexFile(const uint8_t* base, size_t size,
           const std::string& location,
           uint32_t location_checksum,
-          MemMap* mem_map,
+          std::unique_ptr<MemMap> mem_map,
           const OatDexFile* oat_dex_file);
 
   // Top-level initializer that calls other Init methods.
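
With the MemMap parameters changed from raw pointers to std::unique_ptr<MemMap>, ownership of the backing mapping transfers explicitly into the DexFile, which keeps the mapping alive for exactly as long as it is itself alive. A small sketch of that ownership pattern using stand-in types (not the real ART classes):

    #include <cassert>
    #include <memory>
    #include <utility>

    struct MemMap { /* owns an anonymous mapping in the real code */ };

    struct DexFile {
      explicit DexFile(std::unique_ptr<MemMap> map) : mem_map_(std::move(map)) {}
      std::unique_ptr<MemMap> mem_map_;  // backing mapping, released when the DexFile is destroyed
    };

    int main() {
      std::unique_ptr<MemMap> map(new MemMap());
      DexFile dex(std::move(map));       // caller hands the mapping over
      assert(map == nullptr);            // caller no longer owns it
      assert(dex.mem_map_ != nullptr);   // the DexFile does
      return 0;
    }
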
diff --git a/runtime/dex_file_test.cc b/runtime/dex_file_test.cc
index 2704d8a..2328e3d 100644
--- a/runtime/dex_file_test.cc
+++ b/runtime/dex_file_test.cc
@@ -22,6 +22,7 @@
 #include "base/unix_file/fd_file.h"
 #include "common_runtime_test.h"
 #include "dex_file-inl.h"
+#include "mem_map.h"
 #include "os.h"
 #include "scoped_thread_state_change.h"
 #include "thread-inl.h"
@@ -61,7 +62,7 @@
   255, 255, 255, 255
 };
 
-static inline uint8_t* DecodeBase64(const char* src, size_t* dst_size) {
+static inline std::vector<uint8_t> DecodeBase64(const char* src) {
   std::vector<uint8_t> tmp;
   uint32_t t = 0, y = 0;
   int g = 3;
@@ -73,13 +74,11 @@
       c = 0;
       // prevent g < 0 which would potentially allow an overflow later
       if (--g < 0) {
-        *dst_size = 0;
-        return nullptr;
+        return std::vector<uint8_t>();
       }
     } else if (g != 3) {
       // we only allow = to be at the end
-      *dst_size = 0;
-      return nullptr;
+      return std::vector<uint8_t>();
     }
     t = (t << 6) | c;
     if (++y == 4) {
@@ -94,17 +93,9 @@
     }
   }
   if (y != 0) {
-    *dst_size = 0;
-    return nullptr;
+    return std::vector<uint8_t>();
   }
-  std::unique_ptr<uint8_t[]> dst(new uint8_t[tmp.size()]);
-  if (dst_size != nullptr) {
-    *dst_size = tmp.size();
-  } else {
-    *dst_size = 0;
-  }
-  std::copy(tmp.begin(), tmp.end(), dst.get());
-  return dst.release();
+  return tmp;
 }
 
 // Although this is the same content logically as the Nested test dex,
@@ -175,14 +166,13 @@
 static void DecodeAndWriteDexFile(const char* base64, const char* location) {
   // decode base64
   CHECK(base64 != nullptr);
-  size_t length;
-  std::unique_ptr<uint8_t[]> dex_bytes(DecodeBase64(base64, &length));
-  CHECK(dex_bytes.get() != nullptr);
+  std::vector<uint8_t> dex_bytes = DecodeBase64(base64);
+  CHECK_NE(dex_bytes.size(), 0u);
 
   // write to provided file
   std::unique_ptr<File> file(OS::CreateEmptyFile(location));
   CHECK(file.get() != nullptr);
-  if (!file->WriteFully(dex_bytes.get(), length)) {
+  if (!file->WriteFully(dex_bytes.data(), dex_bytes.size())) {
     PLOG(FATAL) << "Failed to write base64 as dex file";
   }
   if (file->FlushCloseOrErase() != 0) {
@@ -208,9 +198,67 @@
   return dex_file;
 }
 
+static std::unique_ptr<const DexFile> OpenDexFileInMemoryBase64(const char* base64,
+                                                                const char* location,
+                                                                uint32_t location_checksum) {
+  CHECK(base64 != nullptr);
+  std::vector<uint8_t> dex_bytes = DecodeBase64(base64);
+  CHECK_NE(dex_bytes.size(), 0u);
+
+  std::string error_message;
+  std::unique_ptr<MemMap> region(MemMap::MapAnonymous("test-region",
+                                                      nullptr,
+                                                      dex_bytes.size(),
+                                                      PROT_READ | PROT_WRITE,
+                                                      /* low_4gb */ false,
+                                                      /* reuse */ false,
+                                                      &error_message));
+  memcpy(region->Begin(), dex_bytes.data(), dex_bytes.size());
+  std::unique_ptr<const DexFile> dex_file(DexFile::Open(location,
+                                                        location_checksum,
+                                                        std::move(region),
+                                                        /* verify */ true,
+                                                        /* verify_checksum */ true,
+                                                        &error_message));
+  CHECK(dex_file != nullptr) << error_message;
+  return dex_file;
+}
+
 TEST_F(DexFileTest, Header) {
   ScratchFile tmp;
   std::unique_ptr<const DexFile> raw(OpenDexFileBase64(kRawDex, tmp.GetFilename().c_str()));
+  ASSERT_TRUE(raw != nullptr);
+
+  const DexFile::Header& header = raw->GetHeader();
+  // TODO: header.magic_
+  EXPECT_EQ(0x00d87910U, header.checksum_);
+  // TODO: header.signature_
+  EXPECT_EQ(904U, header.file_size_);
+  EXPECT_EQ(112U, header.header_size_);
+  EXPECT_EQ(0U, header.link_size_);
+  EXPECT_EQ(0U, header.link_off_);
+  EXPECT_EQ(15U, header.string_ids_size_);
+  EXPECT_EQ(112U, header.string_ids_off_);
+  EXPECT_EQ(7U, header.type_ids_size_);
+  EXPECT_EQ(172U, header.type_ids_off_);
+  EXPECT_EQ(2U, header.proto_ids_size_);
+  EXPECT_EQ(200U, header.proto_ids_off_);
+  EXPECT_EQ(1U, header.field_ids_size_);
+  EXPECT_EQ(224U, header.field_ids_off_);
+  EXPECT_EQ(3U, header.method_ids_size_);
+  EXPECT_EQ(232U, header.method_ids_off_);
+  EXPECT_EQ(2U, header.class_defs_size_);
+  EXPECT_EQ(256U, header.class_defs_off_);
+  EXPECT_EQ(584U, header.data_size_);
+  EXPECT_EQ(320U, header.data_off_);
+
+  EXPECT_EQ(header.checksum_, raw->GetLocationChecksum());
+}
+
+TEST_F(DexFileTest, HeaderInMemory) {
+  ScratchFile tmp;
+  std::unique_ptr<const DexFile> raw =
+      OpenDexFileInMemoryBase64(kRawDex, tmp.GetFilename().c_str(), 0x00d87910U);
   ASSERT_TRUE(raw.get() != nullptr);
 
   const DexFile::Header& header = raw->GetHeader();
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index 26b8e8a..d1d8caa 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -42,11 +42,19 @@
 template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline uint32_t Class::GetObjectSize() {
   // Note: Extra parentheses to avoid the comma being interpreted as macro parameter separator.
-  DCHECK((!IsVariableSize<kVerifyFlags, kReadBarrierOption>())) << " class=" << PrettyTypeOf(this);
+  DCHECK((!IsVariableSize<kVerifyFlags, kReadBarrierOption>())) << "class=" << PrettyTypeOf(this);
   return GetField32(ObjectSizeOffset());
 }
 
 template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
+inline uint32_t Class::GetObjectSizeAllocFastPath() {
+  // Note: Extra parentheses to avoid the comma being interpreted as macro parameter separator.
+  DCHECK((!IsVariableSize<kVerifyFlags, kReadBarrierOption>())) << "class=" << PrettyTypeOf(this);
+  return GetField32(ObjectSizeAllocFastPathOffset());
+}
+
+
+template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
 inline Class* Class::GetSuperClass() {
   // Can only get super class for loaded classes (hack for when runtime is
   // initializing)
@@ -862,7 +870,7 @@
   klass->SetDexClassDefIndex(DexFile::kDexNoIndex16);  // Default to no valid class def index.
   klass->SetDexTypeIndex(DexFile::kDexNoIndex16);  // Default to no valid type index.
   // Default to force slow path until initialized.
-  klass->SetObjectSizeAllocFastPath(std::numeric_limits<int32_t>::max());
+  klass->SetObjectSizeAllocFastPath(std::numeric_limits<uint32_t>::max());
 }
 
 inline void Class::SetAccessFlags(uint32_t new_access_flags) {
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index b60c573..c979c28 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -109,12 +109,11 @@
   // alloc path sees a valid object size, we would know that it's initialized as long as it has a
   // load-acquire/fake dependency.
   if (new_status == kStatusInitialized && !h_this->IsVariableSize()) {
-    uint32_t object_size = RoundUp(h_this->GetObjectSize(), kObjectAlignment);
-    if (h_this->IsFinalizable()) {
-      // Finalizable objects must always go slow path.
-      object_size = std::numeric_limits<int32_t>::max();
+    DCHECK_EQ(h_this->GetObjectSizeAllocFastPath(), std::numeric_limits<uint32_t>::max());
+    // Finalizable objects must always go slow path.
+    if (!h_this->IsFinalizable()) {
+      h_this->SetObjectSizeAllocFastPath(RoundUp(h_this->GetObjectSize(), kObjectAlignment));
     }
-    h_this->SetObjectSizeAllocFastPath(object_size);
   }
 
   if (!class_linker_initialized) {
@@ -149,7 +148,7 @@
   if (kIsDebugBuild && new_class_size < GetClassSize()) {
     DumpClass(LOG(INTERNAL_FATAL), kDumpClassFullDetail);
     LOG(INTERNAL_FATAL) << new_class_size << " vs " << GetClassSize();
-    LOG(FATAL) << " class=" << PrettyTypeOf(this);
+    LOG(FATAL) << "class=" << PrettyTypeOf(this);
   }
   // Not called within a transaction.
   SetField32<false>(OFFSET_OF_OBJECT_MEMBER(Class, class_size_), new_class_size);
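
SetStatus now leaves object_size_alloc_fast_path_ at its initial max uint32_t value unless the class reaches kStatusInitialized and is not finalizable. Because the allocation fast paths above compare that field against ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE (or against the remaining TLAB space), the sentinel makes uninitialized and finalizable classes take the slow path without separate status or access-flag checks. A toy model of that single comparison, with an illustrative bracket size:

    #include <cassert>
    #include <cstdint>
    #include <limits>

    constexpr uint32_t kSentinel = std::numeric_limits<uint32_t>::max();
    constexpr uint32_t kMaxThreadLocalBracketSize = 128u;  // stand-in for ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE

    // One unsigned comparison covers "initialized", "not finalizable", and "fits a bracket".
    bool TakesRosAllocFastPath(uint32_t object_size_alloc_fast_path) {
      return object_size_alloc_fast_path < kMaxThreadLocalBracketSize;
    }

    int main() {
      assert(!TakesRosAllocFastPath(kSentinel));  // uninitialized or finalizable class
      assert(TakesRosAllocFastPath(64u));         // small, initialized, non-finalizable class
      assert(!TakesRosAllocFastPath(4096u));      // too large for a thread-local bracket
      return 0;
    }
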
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index f8f414b..99b7769 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -598,6 +598,10 @@
 
   void SetObjectSizeAllocFastPath(uint32_t new_object_size) REQUIRES_SHARED(Locks::mutator_lock_);
 
+  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  uint32_t GetObjectSizeAllocFastPath() REQUIRES_SHARED(Locks::mutator_lock_);
+
   void SetObjectSizeWithoutChecks(uint32_t new_object_size)
       REQUIRES_SHARED(Locks::mutator_lock_) {
     // Not called within a transaction.
@@ -1462,7 +1466,7 @@
   // See also class_size_.
   uint32_t object_size_;
 
-  // Aligned object size for allocation fast path. The value is max int if the object is
+  // Aligned object size for allocation fast path. The value is max uint32_t if the object is
   // uninitialized or finalizable. Not currently used for variable sized objects.
   uint32_t object_size_alloc_fast_path_;
 
diff --git a/runtime/native/dalvik_system_InMemoryDexClassLoader_DexData.cc b/runtime/native/dalvik_system_InMemoryDexClassLoader_DexData.cc
new file mode 100644
index 0000000..08bf978
--- /dev/null
+++ b/runtime/native/dalvik_system_InMemoryDexClassLoader_DexData.cc
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "dalvik_system_InMemoryDexClassLoader_DexData.h"
+
+#include <sstream>
+
+#include "class_linker.h"
+#include "common_throws.h"
+#include "dex_file.h"
+#include "jni_internal.h"
+#include "mem_map.h"
+#include "mirror/class_loader.h"
+#include "mirror/object-inl.h"
+#include "scoped_thread_state_change.h"
+#include "ScopedUtfChars.h"
+
+namespace art {
+
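+// Allocates an anonymous, read-write memory mapping large enough to hold the dex data in the
+// range [start, end). Throws a wrapped IOException and returns null on failure.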
+static std::unique_ptr<MemMap> AllocateDexMemoryMap(JNIEnv* env, jint start, jint end) {
+  if (end <= start) {
+    ScopedObjectAccess soa(env);
+    ThrowWrappedIOException("Bad range");
+    return nullptr;
+  }
+
+  std::string error_message;
+  size_t length = static_cast<size_t>(end - start);
+  std::unique_ptr<MemMap> dex_mem_map(MemMap::MapAnonymous("DEX data",
+                                                           nullptr,
+                                                           length,
+                                                           PROT_READ | PROT_WRITE,
+                                                           /* low_4gb */ false,
+                                                           /* reuse */ false,
+                                                           &error_message));
+  if (dex_mem_map == nullptr) {
+    ScopedObjectAccess soa(env);
+    ThrowWrappedIOException("%s", error_message.c_str());
+  }
+  return dex_mem_map;
+}
+
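+// The native DexFile pointer is passed to and from managed code as an opaque jlong cookie.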
+static jlong DexFileToCookie(const DexFile* dex_file) {
+  return reinterpret_cast<jlong>(dex_file);
+}
+
+static const DexFile* CookieToDexFile(jlong cookie) {
+  return reinterpret_cast<const DexFile*>(cookie);
+}
+
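+// Opens and verifies a DexFile backed by the given memory mapping, then makes the mapping
+// read-only. Throws a wrapped IOException and returns null on failure.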
+static const DexFile* CreateDexFile(JNIEnv* env, std::unique_ptr<MemMap> dex_mem_map) {
+  std::string location = StringPrintf("InMemoryDexClassLoader_DexData@%p-%p",
+                                      dex_mem_map->Begin(),
+                                      dex_mem_map->End());
+  std::string error_message;
+  std::unique_ptr<const DexFile> dex_file(DexFile::Open(location,
+                                                        0,
+                                                        std::move(dex_mem_map),
+                                                        /* verify */ true,
+                                                        /* verify_location */ true,
+                                                        &error_message));
+  if (dex_file == nullptr) {
+    ScopedObjectAccess soa(env);
+    ThrowWrappedIOException("%s", error_message.c_str());
+    return nullptr;
+  }
+
+  if (!dex_file->DisableWrite()) {
+    ScopedObjectAccess soa(env);
+    ThrowWrappedIOException("Failed to make dex file read-only");
+    return nullptr;
+  }
+
+  return dex_file.release();
+}
+
+static jlong InMemoryDexClassLoader_DexData_initializeWithDirectBuffer(
+    JNIEnv* env, jclass, jobject buffer, jint start, jint end) {
+  uint8_t* base_address = reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer));
+  if (base_address == nullptr) {
+    ScopedObjectAccess soa(env);
+    ThrowWrappedIOException("dexFileBuffer not direct");
+    return 0;
+  }
+
+  std::unique_ptr<MemMap> dex_mem_map(AllocateDexMemoryMap(env, start, end));
+  if (dex_mem_map == nullptr) {
+    DCHECK(Thread::Current()->IsExceptionPending());
+    return 0;
+  }
+
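+  // Copy the dex data out of the direct buffer into the anonymous mapping.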
+  size_t length = static_cast<size_t>(end - start);
+  memcpy(dex_mem_map->Begin(), base_address, length);
+  return DexFileToCookie(CreateDexFile(env, std::move(dex_mem_map)));
+}
+
+static jlong InMemoryDexClassLoader_DexData_initializeWithArray(
+    JNIEnv* env, jclass, jbyteArray buffer, jint start, jint end) {
+  std::unique_ptr<MemMap> dex_mem_map(AllocateDexMemoryMap(env, start, end));
+  if (dex_mem_map == nullptr) {
+    DCHECK(Thread::Current()->IsExceptionPending());
+    return 0;
+  }
+
+  auto destination = reinterpret_cast<jbyte*>(dex_mem_map->Begin());
+  env->GetByteArrayRegion(buffer, start, end - start, destination);
+  return DexFileToCookie(CreateDexFile(env, std::move(dex_mem_map)));
+}
+
+static void InMemoryDexClassLoader_DexData_uninitialize(JNIEnv* env, jclass, jlong cookie) {
+  const DexFile* dex_file = CookieToDexFile(cookie);
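+  // Debug check: the class linker must no longer hold a dex cache referencing this dex file.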
+  if (kIsDebugBuild) {
+    ScopedObjectAccess soa(env);
+    ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
+    CHECK(class_linker->FindDexCache(soa.Self(), *dex_file, true) == nullptr);
+  }
+  delete dex_file;
+}
+
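+// Defines the named class from this dex file using the given class loader. Returns null if the
+// dex file contains no matching class_def or the class cannot be defined.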
+static jclass InMemoryDexClassLoader_DexData_findClass(
+    JNIEnv* env, jobject dexData, jstring name, jobject loader, jlong cookie) {
+  ScopedUtfChars scoped_class_name(env, name);
+  if (env->ExceptionCheck()) {
+    return nullptr;
+  }
+
+  const char* class_name = scoped_class_name.c_str();
+  const std::string descriptor(DotToDescriptor(class_name));
+  const char* class_descriptor = descriptor.c_str();
+  const size_t hash = ComputeModifiedUtf8Hash(class_descriptor);
+  const DexFile* dex_file = CookieToDexFile(cookie);
+  const DexFile::ClassDef* dex_class_def = dex_file->FindClassDef(class_descriptor, hash);
+  if (dex_class_def != nullptr) {
+    ScopedObjectAccess soa(env);
+    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+    StackHandleScope<1> handle_scope(soa.Self());
+    Handle<mirror::ClassLoader> class_loader(
+        handle_scope.NewHandle(soa.Decode<mirror::ClassLoader*>(loader)));
+    class_linker->RegisterDexFile(*dex_file, class_loader.Get());
+    mirror::Class* result = class_linker->DefineClass(
+        soa.Self(), class_descriptor, hash, class_loader, *dex_file, *dex_class_def);
+    if (result != nullptr) {
+      // Ensure the class table has a strong reference to the
+      // InMemoryClassLoader/DexData instance now that a class has
+      // been loaded.
+      class_linker->InsertDexFileInToClassLoader(
+          soa.Decode<mirror::Object*>(dexData), class_loader.Get());
+      return soa.AddLocalReference<jclass>(result);
+    }
+  }
+
+  VLOG(class_linker) << "Failed to find dex_class_def " << class_name;
+  return nullptr;
+}
+
+static JNINativeMethod gMethods[] = {
+  NATIVE_METHOD(InMemoryDexClassLoader_DexData,
+                initializeWithDirectBuffer,
+                "(Ljava/nio/ByteBuffer;II)J"),
+  NATIVE_METHOD(InMemoryDexClassLoader_DexData, initializeWithArray, "([BII)J"),
+  NATIVE_METHOD(InMemoryDexClassLoader_DexData, uninitialize, "(J)V"),
+  NATIVE_METHOD(InMemoryDexClassLoader_DexData,
+                findClass,
+                "(Ljava/lang/String;Ljava/lang/ClassLoader;J)Ljava/lang/Class;"),
+};
+
+void register_dalvik_system_InMemoryDexClassLoader_DexData(JNIEnv* env) {
+  REGISTER_NATIVE_METHODS("dalvik/system/InMemoryDexClassLoader$DexData");
+}
+
+}  // namespace art
diff --git a/runtime/native/dalvik_system_InMemoryDexClassLoader_DexData.h b/runtime/native/dalvik_system_InMemoryDexClassLoader_DexData.h
new file mode 100644
index 0000000..f73d07a
--- /dev/null
+++ b/runtime/native/dalvik_system_InMemoryDexClassLoader_DexData.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_NATIVE_DALVIK_SYSTEM_INMEMORYDEXCLASSLOADER_DEXDATA_H_
+#define ART_RUNTIME_NATIVE_DALVIK_SYSTEM_INMEMORYDEXCLASSLOADER_DEXDATA_H_
+
+#include <jni.h>
+
+namespace art {
+
+void register_dalvik_system_InMemoryDexClassLoader_DexData(JNIEnv* env);
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_NATIVE_DALVIK_SYSTEM_INMEMORYDEXCLASSLOADER_DEXDATA_H_
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 8ea31a1..a365a73 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -90,6 +90,7 @@
 #include "mirror/throwable.h"
 #include "monitor.h"
 #include "native/dalvik_system_DexFile.h"
+#include "native/dalvik_system_InMemoryDexClassLoader_DexData.h"
 #include "native/dalvik_system_VMDebug.h"
 #include "native/dalvik_system_VMRuntime.h"
 #include "native/dalvik_system_VMStack.h"
@@ -1389,6 +1390,7 @@
 
 void Runtime::RegisterRuntimeNativeMethods(JNIEnv* env) {
   register_dalvik_system_DexFile(env);
+  register_dalvik_system_InMemoryDexClassLoader_DexData(env);
   register_dalvik_system_VMDebug(env);
   register_dalvik_system_VMRuntime(env);
   register_dalvik_system_VMStack(env);
diff --git a/runtime/verifier/method_verifier.cc b/runtime/verifier/method_verifier.cc
index 2259b41..589e71c 100644
--- a/runtime/verifier/method_verifier.cc
+++ b/runtime/verifier/method_verifier.cc
@@ -159,7 +159,7 @@
                      &dex_file,
                      dex_cache,
                      class_loader,
-                     class_def,
+                     *class_def,
                      callbacks,
                      allow_soft_failures,
                      log_level,
@@ -190,7 +190,7 @@
 MethodVerifier::FailureData MethodVerifier::VerifyMethods(Thread* self,
                                                           ClassLinker* linker,
                                                           const DexFile* dex_file,
-                                                          const DexFile::ClassDef* class_def,
+                                                          const DexFile::ClassDef& class_def,
                                                           ClassDataItemIterator* it,
                                                           Handle<mirror::DexCache> dex_cache,
                                                           Handle<mirror::ClassLoader> class_loader,
@@ -214,7 +214,7 @@
       continue;
     }
     previous_method_idx = method_idx;
-    InvokeType type = it->GetMethodInvokeType(*class_def);
+    InvokeType type = it->GetMethodInvokeType(class_def);
     ArtMethod* method = linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>(
         *dex_file, method_idx, dex_cache, class_loader, nullptr, type);
     if (method == nullptr) {
@@ -247,7 +247,7 @@
       } else {
         // If we didn't log a hard failure before, print the header of the message.
         *error_string += "Verifier rejected class ";
-        *error_string += PrettyDescriptor(dex_file->GetClassDescriptor(*class_def));
+        *error_string += PrettyDescriptor(dex_file->GetClassDescriptor(class_def));
         *error_string += ":";
       }
       *error_string += " ";
@@ -264,23 +264,22 @@
                                                         const DexFile* dex_file,
                                                         Handle<mirror::DexCache> dex_cache,
                                                         Handle<mirror::ClassLoader> class_loader,
-                                                        const DexFile::ClassDef* class_def,
+                                                        const DexFile::ClassDef& class_def,
                                                         CompilerCallbacks* callbacks,
                                                         bool allow_soft_failures,
                                                         LogSeverity log_level,
                                                         std::string* error) {
-  DCHECK(class_def != nullptr);
   ScopedTrace trace(__FUNCTION__);
 
   // A class must not be abstract and final.
-  if ((class_def->access_flags_ & (kAccAbstract | kAccFinal)) == (kAccAbstract | kAccFinal)) {
+  if ((class_def.access_flags_ & (kAccAbstract | kAccFinal)) == (kAccAbstract | kAccFinal)) {
     *error = "Verifier rejected class ";
-    *error += PrettyDescriptor(dex_file->GetClassDescriptor(*class_def));
+    *error += PrettyDescriptor(dex_file->GetClassDescriptor(class_def));
     *error += ": class is abstract and final.";
     return kHardFailure;
   }
 
-  const uint8_t* class_data = dex_file->GetClassData(*class_def);
+  const uint8_t* class_data = dex_file->GetClassData(class_def);
   if (class_data == nullptr) {
     // empty class, probably a marker interface
     return kNoFailure;
@@ -327,7 +326,7 @@
       // warning.
       std::string tmp =
           StringPrintf("Class %s failed lock verification and will run slower.",
-                       PrettyDescriptor(dex_file->GetClassDescriptor(*class_def)).c_str());
+                       PrettyDescriptor(dex_file->GetClassDescriptor(class_def)).c_str());
       if (!gPrintedDxMonitorText) {
         tmp = tmp + "\nCommon causes for lock verification issues are non-optimized dex code\n"
                     "and incorrect proguard optimizations.";
@@ -355,7 +354,7 @@
                                                          const DexFile* dex_file,
                                                          Handle<mirror::DexCache> dex_cache,
                                                          Handle<mirror::ClassLoader> class_loader,
-                                                         const DexFile::ClassDef* class_def,
+                                                         const DexFile::ClassDef& class_def,
                                                          const DexFile::CodeItem* code_item,
                                                          ArtMethod* method,
                                                          uint32_t method_access_flags,
@@ -436,7 +435,7 @@
 
       if (callbacks != nullptr) {
         // Let the interested party know that we failed the class.
-        ClassReference ref(dex_file, dex_file->GetIndexForClassDef(*class_def));
+        ClassReference ref(dex_file, dex_file->GetIndexForClassDef(class_def));
         callbacks->ClassRejected(ref);
       }
     }
@@ -463,7 +462,7 @@
                                                     const DexFile* dex_file,
                                                     Handle<mirror::DexCache> dex_cache,
                                                     Handle<mirror::ClassLoader> class_loader,
-                                                    const DexFile::ClassDef* class_def,
+                                                    const DexFile::ClassDef& class_def,
                                                     const DexFile::CodeItem* code_item,
                                                     ArtMethod* method,
                                                     uint32_t method_access_flags) {
@@ -499,7 +498,7 @@
                                const DexFile* dex_file,
                                Handle<mirror::DexCache> dex_cache,
                                Handle<mirror::ClassLoader> class_loader,
-                               const DexFile::ClassDef* class_def,
+                               const DexFile::ClassDef& class_def,
                                const DexFile::CodeItem* code_item,
                                uint32_t dex_method_idx,
                                ArtMethod* method,
@@ -544,7 +543,6 @@
       is_constructor_(false),
       link_(nullptr) {
   self->PushVerifier(this);
-  DCHECK(class_def != nullptr);
 }
 
 MethodVerifier::~MethodVerifier() {
@@ -561,7 +559,7 @@
                           m->GetDexFile(),
                           dex_cache,
                           class_loader,
-                          &m->GetClassDef(),
+                          m->GetClassDef(),
                           m->GetCodeItem(),
                           m->GetDexMethodIndex(),
                           m,
@@ -616,7 +614,7 @@
                           m->GetDexFile(),
                           dex_cache,
                           class_loader,
-                          &m->GetClassDef(),
+                          m->GetClassDef(),
                           m->GetCodeItem(),
                           m->GetDexMethodIndex(),
                           m,
@@ -656,7 +654,7 @@
                           m->GetDexFile(),
                           dex_cache,
                           class_loader,
-                          &m->GetClassDef(),
+                          m->GetClassDef(),
                           m->GetCodeItem(),
                           m->GetDexMethodIndex(),
                           m,
@@ -761,7 +759,7 @@
           return false;
         }
       }
-      if ((class_def_->GetJavaAccessFlags() & kAccInterface) != 0) {
+      if ((class_def_.GetJavaAccessFlags() & kAccInterface) != 0) {
         // Interface methods must be public and abstract (if default methods are disabled).
         uint32_t kRequired = kAccPublic;
         if ((method_access_flags_ & kRequired) != kRequired) {
@@ -792,7 +790,7 @@
       return false;
     }
 
-    if ((class_def_->GetJavaAccessFlags() & kAccInterface) != 0) {
+    if ((class_def_.GetJavaAccessFlags() & kAccInterface) != 0) {
       // Interfaces may always have static initializers for their fields. If we are running with
       // default methods enabled we also allow other public, static, non-final methods to have code.
       // Otherwise that is the only type of method allowed.
@@ -4023,7 +4021,7 @@
     }
     if (reference_class->IsInterface()) {
       // TODO Can we verify anything else.
-      if (class_idx == class_def_->class_idx_) {
+      if (class_idx == class_def_.class_idx_) {
         Fail(VERIFY_ERROR_CLASS_CHANGE) << "Cannot invoke-super on self as interface";
         return nullptr;
       }
diff --git a/runtime/verifier/method_verifier.h b/runtime/verifier/method_verifier.h
index e838900..d4e12f7 100644
--- a/runtime/verifier/method_verifier.h
+++ b/runtime/verifier/method_verifier.h
@@ -159,7 +159,7 @@
                                  const DexFile* dex_file,
                                  Handle<mirror::DexCache> dex_cache,
                                  Handle<mirror::ClassLoader> class_loader,
-                                 const DexFile::ClassDef* class_def,
+                                 const DexFile::ClassDef& class_def,
                                  CompilerCallbacks* callbacks,
                                  bool allow_soft_failures,
                                  LogSeverity log_level,
@@ -172,7 +172,7 @@
                                              const DexFile* dex_file,
                                              Handle<mirror::DexCache> dex_cache,
                                              Handle<mirror::ClassLoader> class_loader,
-                                             const DexFile::ClassDef* class_def,
+                                             const DexFile::ClassDef& class_def,
                                              const DexFile::CodeItem* code_item, ArtMethod* method,
                                              uint32_t method_access_flags)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -283,7 +283,7 @@
                  const DexFile* dex_file,
                  Handle<mirror::DexCache> dex_cache,
                  Handle<mirror::ClassLoader> class_loader,
-                 const DexFile::ClassDef* class_def,
+                 const DexFile::ClassDef& class_def,
                  const DexFile::CodeItem* code_item,
                  uint32_t method_idx,
                  ArtMethod* method,
@@ -330,7 +330,7 @@
   static FailureData VerifyMethods(Thread* self,
                                    ClassLinker* linker,
                                    const DexFile* dex_file,
-                                   const DexFile::ClassDef* class_def,
+                                   const DexFile::ClassDef& class_def,
                                    ClassDataItemIterator* it,
                                    Handle<mirror::DexCache> dex_cache,
                                    Handle<mirror::ClassLoader> class_loader,
@@ -356,7 +356,7 @@
                                   const DexFile* dex_file,
                                   Handle<mirror::DexCache> dex_cache,
                                   Handle<mirror::ClassLoader> class_loader,
-                                  const DexFile::ClassDef* class_def_idx,
+                                  const DexFile::ClassDef& class_def,
                                   const DexFile::CodeItem* code_item,
                                   ArtMethod* method,
                                   uint32_t method_access_flags,
@@ -759,7 +759,7 @@
   Handle<mirror::DexCache> dex_cache_ GUARDED_BY(Locks::mutator_lock_);
   // The class loader for the declaring class of the method.
   Handle<mirror::ClassLoader> class_loader_ GUARDED_BY(Locks::mutator_lock_);
-  const DexFile::ClassDef* const class_def_;  // The class def of the declaring class of the method.
+  const DexFile::ClassDef& class_def_;  // The class def of the declaring class of the method.
   const DexFile::CodeItem* const code_item_;  // The code item containing the code for the method.
   const RegType* declaring_class_;  // Lazily computed reg type of the method's declaring class.
   // Instruction widths and flags, one entry per code unit.
diff --git a/test/valgrind-suppressions.txt b/test/valgrind-suppressions.txt
index acab6e5..fd3c331 100644
--- a/test/valgrind-suppressions.txt
+++ b/test/valgrind-suppressions.txt
@@ -13,3 +13,12 @@
    fun:_dl_start
    obj:/lib/x86_64-linux-gnu/ld-2.19.so
 }
+
+{
+   b/31275764
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   ...
+   fun:_ZN3art7Runtime17InitNativeMethodsEv
+}
diff --git a/test/valgrind-target-suppressions.txt b/test/valgrind-target-suppressions.txt
index 7ae6d53..fbc99b1 100644
--- a/test/valgrind-target-suppressions.txt
+++ b/test/valgrind-target-suppressions.txt
@@ -50,3 +50,12 @@
    fun:malloc
    fun:setenv
 }
+
+{
+   b/31275764
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   ...
+   fun:_ZN3art7Runtime17InitNativeMethodsEv
+}
diff --git a/tools/libcore_failures.txt b/tools/libcore_failures.txt
index a69b58b..7b5e9ed 100644
--- a/tools/libcore_failures.txt
+++ b/tools/libcore_failures.txt
@@ -86,7 +86,6 @@
           "libcore.net.NetworkSecurityPolicyTest#testCleartextTrafficPolicyWithJarFtpURLConnection",
           "libcore.net.NetworkSecurityPolicyTest#testCleartextTrafficPolicyWithLoggingSocketHandler",
           "libcore.net.NetworkSecurityPolicyTest#testCleartextTrafficPolicyWithHttpURLConnection",
-          "libcore.net.NetworkSecurityPolicyTest#testCleartextTrafficPolicyWithJarHttpURLConnection",
           "org.apache.harmony.luni.tests.internal.net.www.protocol.http.HttpURLConnectionTest",
           "org.apache.harmony.luni.tests.internal.net.www.protocol.https.HttpsURLConnectionTest",
           "org.apache.harmony.luni.tests.java.net.URLConnectionTest",
@@ -234,5 +233,11 @@
   modes: [device],
   names: ["libcore.java.lang.ProcessBuilderTest#testRedirectInherit",
           "libcore.java.lang.ProcessBuilderTest#testRedirect_nullStreams"]
+},
+{
+  description: "Sometimes timeouts",
+  result: EXEC_FAILED,
+  bug: 31258002,
+  names: ["libcore.net.NetworkSecurityPolicyTest#testCleartextTrafficPolicyWithJarHttpURLConnection"]
 }
 ]
diff --git a/tools/run-jdwp-tests.sh b/tools/run-jdwp-tests.sh
index bdb2d4b..01dae43 100755
--- a/tools/run-jdwp-tests.sh
+++ b/tools/run-jdwp-tests.sh
@@ -50,6 +50,8 @@
 host="no"
 # Use JIT compiling by default.
 use_jit=true
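+# Default variant, used on the host to pick the debuggee bitness; overridden by --variant=.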
+variant_cmdline_parameter="--variant=X32"
 
 while true; do
   if [[ "$1" == "--mode=host" ]]; then
@@ -93,11 +94,35 @@
     shift
   elif [[ "$1" == "" ]]; then
     break
+  elif [[ $1 == --variant=* ]]; then
+    variant_cmdline_parameter=$1
+    shift
   else
     shift
   fi
 done
 
+# For the host:
+#
+# If a variant is set, use it to modify the art_debugee parameter so that the forked debuggee
+# has the same bitness as the controller. This should be fine and should not impact
+# cross-bitness testing, as the JDWP protocol is always 64-bit anyway (in our implementation).
+#
+# Note: this isn't necessary for the device as the BOOTCLASSPATH environment variable is set there
+#       and used as a fallback.
+if [[ $host == "yes" ]]; then
+  variant=${variant_cmdline_parameter:10}
+  if [[ $variant == "x32" || $variant == "X32" ]]; then
+    art_debugee="$art_debugee --32"
+  elif [[ $variant == "x64" || $variant == "X64" ]]; then
+    art_debugee="$art_debugee --64"
+  else
+    echo "Error, do not understand variant $variant_cmdline_parameter."
+    exit 1
+  fi
+fi
+
 if [[ "$image" != "" ]]; then
   vm_args="--vm-arg $image"
 fi