Refactor object reference visiting logic.

Moved the reference visiting logic from MarkSweep into mirror::Object
so that all of the collectors can share Object::VisitReferences.
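
For illustration, the shape of the change at a visitor call site, condensed
from the hunks below (a summary sketch, not code carried by the patch):

    // Before: MarkSweep decoded each field and passed the reference value in.
    //   void operator()(Object* obj, Object* ref, const MemberOffset& offset, bool is_static);
    //   collector::MarkSweep::VisitObjectReferences<kMovingClasses>(obj, visitor);
    //
    // After: Object walks its own fields and passes only the field offset, so
    // the visitor reads (or rewrites) the field slot itself.
    //   void operator()(Object* obj, MemberOffset offset, bool is_static);
    //   obj->VisitReferences<kMovingClasses>(visitor);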

Change-Id: I773249478dc463d83b465e85c2402320488577c0
diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc
index 8bc1ced..314f3c5 100644
--- a/runtime/gc/accounting/mod_union_table.cc
+++ b/runtime/gc/accounting/mod_union_table.cc
@@ -70,37 +70,29 @@
 
 class ModUnionUpdateObjectReferencesVisitor {
  public:
-  ModUnionUpdateObjectReferencesVisitor(MarkObjectCallback* callback, void* arg)
+  ModUnionUpdateObjectReferencesVisitor(MarkHeapReferenceCallback* callback, void* arg)
     : callback_(callback),
       arg_(arg) {
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, Object* ref, const MemberOffset& offset,
-                  bool /* is_static */) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  void operator()(Object* obj, MemberOffset offset, bool /* static */) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     // Only add the reference if it is non null and fits our criteria.
-    if (ref != nullptr) {
-      Object* new_ref = callback_(ref, arg_);
-      if (new_ref != ref) {
-        // Use SetFieldObjectWithoutWriteBarrier to avoid card mark as an optimization which
-        // reduces dirtied pages and improves performance.
-        if (Runtime::Current()->IsActiveTransaction()) {
-          obj->SetFieldObjectWithoutWriteBarrier<true>(offset, new_ref, true);
-        } else {
-          obj->SetFieldObjectWithoutWriteBarrier<false>(offset, new_ref, true);
-        }
-      }
+    mirror::HeapReference<Object>* obj_ptr = obj->GetFieldObjectReferenceAddr(offset);
+    if (obj_ptr->AsMirrorPtr() != nullptr) {
+      callback_(obj_ptr, arg_);
     }
   }
 
  private:
-  MarkObjectCallback* const callback_;
+  MarkHeapReferenceCallback* const callback_;
   void* arg_;
 };
 
 class ModUnionScanImageRootVisitor {
  public:
-  ModUnionScanImageRootVisitor(MarkObjectCallback* callback, void* arg)
+  ModUnionScanImageRootVisitor(MarkHeapReferenceCallback* callback, void* arg)
       : callback_(callback), arg_(arg) {}
 
   void operator()(Object* root) const
@@ -108,11 +100,11 @@
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     DCHECK(root != NULL);
     ModUnionUpdateObjectReferencesVisitor ref_visitor(callback_, arg_);
-    collector::MarkSweep::VisitObjectReferences<kMovingClasses>(root, ref_visitor);
+    root->VisitReferences<kMovingClasses>(ref_visitor);
   }
 
  private:
-  MarkObjectCallback* const callback_;
+  MarkHeapReferenceCallback* const callback_;
   void* const arg_;
 };
 
@@ -131,12 +123,14 @@
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, Object* ref, const MemberOffset& offset,
-                  bool /* is_static */) const {
+  void operator()(Object* obj, MemberOffset offset, bool /* static */) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    mirror::HeapReference<Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
+    mirror::Object* ref = ref_ptr->AsMirrorPtr();
     // Only add the reference if it is non null and fits our criteria.
     if (ref != nullptr && mod_union_table_->AddReference(obj, ref)) {
      // Push the address of the reference.
-      references_->push_back(obj->GetFieldObjectReferenceAddr(offset));
+      references_->push_back(ref_ptr);
     }
   }
 
@@ -155,11 +149,10 @@
 
   void operator()(Object* obj) const
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-    DCHECK(obj != NULL);
    // We don't have an early exit since we use the visitor pattern; an early
    // exit should significantly speed this up.
     AddToReferenceArrayVisitor visitor(mod_union_table_, references_);
-    collector::MarkSweep::VisitObjectReferences<kMovingClasses>(obj, visitor);
+    obj->VisitReferences<kMovingClasses>(visitor);
   }
  private:
   ModUnionTableReferenceCache* const mod_union_table_;
@@ -175,20 +168,22 @@
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, Object* ref,
-                  const MemberOffset& /* offset */, bool /* is_static */) const
+  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-    Heap* heap = mod_union_table_->GetHeap();
-    if (ref != NULL && mod_union_table_->AddReference(obj, ref) &&
+    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
+    if (ref != nullptr && mod_union_table_->AddReference(obj, ref) &&
         references_.find(ref) == references_.end()) {
+      Heap* heap = mod_union_table_->GetHeap();
       space::ContinuousSpace* from_space = heap->FindContinuousSpaceFromObject(obj, false);
       space::ContinuousSpace* to_space = heap->FindContinuousSpaceFromObject(ref, false);
-      LOG(INFO) << "Object " << reinterpret_cast<const void*>(obj) << "(" << PrettyTypeOf(obj) << ")"
-                << "References " << reinterpret_cast<const void*>(ref)
-                << "(" << PrettyTypeOf(ref) << ") without being in mod-union table";
-      LOG(INFO) << "FromSpace " << from_space->GetName() << " type " << from_space->GetGcRetentionPolicy();
-      LOG(INFO) << "ToSpace " << to_space->GetName() << " type " << to_space->GetGcRetentionPolicy();
-      mod_union_table_->GetHeap()->DumpSpaces();
+      LOG(INFO) << "Object " << reinterpret_cast<const void*>(obj) << "(" << PrettyTypeOf(obj)
+          << ")" << "References " << reinterpret_cast<const void*>(ref) << "(" << PrettyTypeOf(ref)
+          << ") without being in mod-union table";
+      LOG(INFO) << "FromSpace " << from_space->GetName() << " type "
+          << from_space->GetGcRetentionPolicy();
+      LOG(INFO) << "ToSpace " << to_space->GetName() << " type "
+          << to_space->GetGcRetentionPolicy();
+      heap->DumpSpaces();
       LOG(FATAL) << "FATAL ERROR";
     }
   }
@@ -208,9 +203,8 @@
 
   void operator()(Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
     Locks::heap_bitmap_lock_->AssertSharedHeld(Thread::Current());
-    DCHECK(obj != NULL);
     CheckReferenceVisitor visitor(mod_union_table_, references_);
-    collector::MarkSweep::VisitObjectReferences<kMovingClasses>(obj, visitor);
+    obj->VisitReferences<kMovingClasses>(visitor);
   }
 
  private:
@@ -264,7 +258,7 @@
   }
 }
 
-void ModUnionTableReferenceCache::UpdateAndMarkReferences(MarkObjectCallback* callback,
+void ModUnionTableReferenceCache::UpdateAndMarkReferences(MarkHeapReferenceCallback* callback,
                                                           void* arg) {
   Heap* heap = GetHeap();
   CardTable* card_table = heap->GetCardTable();
@@ -298,14 +292,7 @@
   size_t count = 0;
   for (const auto& ref : references_) {
     for (mirror::HeapReference<Object>* obj_ptr : ref.second) {
-      Object* obj = obj_ptr->AsMirrorPtr();
-      if (obj != nullptr) {
-        Object* new_obj = callback(obj, arg);
-        // Avoid dirtying pages in the image unless necessary.
-        if (new_obj != obj) {
-          obj_ptr->Assign(new_obj);
-        }
-      }
+      callback(obj_ptr, arg);
     }
     count += ref.second.size();
   }
@@ -322,7 +309,8 @@
 }
 
 // Mark all references to the alloc space(s).
-void ModUnionTableCardCache::UpdateAndMarkReferences(MarkObjectCallback* callback, void* arg) {
+void ModUnionTableCardCache::UpdateAndMarkReferences(MarkHeapReferenceCallback* callback,
+                                                     void* arg) {
   CardTable* card_table = heap_->GetCardTable();
   ModUnionScanImageRootVisitor scan_visitor(callback, arg);
   SpaceBitmap* bitmap = space_->GetLiveBitmap();
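
The callback contract changes with the visitors. For contrast, the old and new
typedefs with their call-site idioms, both condensed from this patch:

    // Old: the callback returns the (possibly moved) object; the caller
    // compares and stores it back, skipping the write barrier if unchanged.
    typedef mirror::Object* (MarkObjectCallback)(mirror::Object* obj, void* arg);
    //   Object* new_obj = callback(obj, arg);
    //   if (new_obj != obj) obj_ptr->Assign(new_obj);

    // New: the callback receives the field slot itself and updates it in place.
    typedef void (MarkHeapReferenceCallback)(mirror::HeapReference<mirror::Object>* ref, void* arg);
    //   callback(obj_ptr, arg);
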
diff --git a/runtime/gc/accounting/mod_union_table.h b/runtime/gc/accounting/mod_union_table.h
index 2e22a11..c4b020b 100644
--- a/runtime/gc/accounting/mod_union_table.h
+++ b/runtime/gc/accounting/mod_union_table.h
@@ -69,7 +69,7 @@
   // Update the mod-union table using data stored by ClearCards. There may be multiple ClearCards
   // before a call to update, for example, back-to-back sticky GCs. Also mark references to other
   // spaces which are stored in the mod-union table.
-  virtual void UpdateAndMarkReferences(MarkObjectCallback* callback, void* arg) = 0;
+  virtual void UpdateAndMarkReferences(MarkHeapReferenceCallback* callback, void* arg) = 0;
 
  // Verification, sanity checks that we don't have clean cards which conflict with our cached data
   // for said cards. Exclusive lock is required since verify sometimes uses
@@ -106,7 +106,7 @@
   void ClearCards();
 
   // Update table based on cleared cards and mark all references to the other spaces.
-  void UpdateAndMarkReferences(MarkObjectCallback* callback, void* arg)
+  void UpdateAndMarkReferences(MarkHeapReferenceCallback* callback, void* arg)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
@@ -142,7 +142,7 @@
   void ClearCards();
 
   // Mark all references to the alloc space(s).
-  void UpdateAndMarkReferences(MarkObjectCallback* callback, void* arg)
+  void UpdateAndMarkReferences(MarkHeapReferenceCallback* callback, void* arg)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
diff --git a/runtime/gc/accounting/remembered_set.cc b/runtime/gc/accounting/remembered_set.cc
index a832615..afa5054 100644
--- a/runtime/gc/accounting/remembered_set.cc
+++ b/runtime/gc/accounting/remembered_set.cc
@@ -60,28 +60,24 @@
 
 class RememberedSetReferenceVisitor {
  public:
-  RememberedSetReferenceVisitor(MarkObjectCallback* callback, space::ContinuousSpace* target_space,
+  RememberedSetReferenceVisitor(MarkHeapReferenceCallback* callback,
+                                space::ContinuousSpace* target_space,
                                 bool* const contains_reference_to_target_space, void* arg)
       : callback_(callback), target_space_(target_space), arg_(arg),
         contains_reference_to_target_space_(contains_reference_to_target_space) {}
 
-  void operator()(mirror::Object* obj, mirror::Object* ref,
-                  const MemberOffset& offset, bool /* is_static */) const
+  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    if (ref != nullptr) {
-      if (target_space_->HasAddress(ref)) {
-        *contains_reference_to_target_space_ = true;
-        mirror::Object* new_ref = callback_(ref, arg_);
-        DCHECK(!target_space_->HasAddress(new_ref));
-        if (new_ref != ref) {
-          obj->SetFieldObjectWithoutWriteBarrier<false>(offset, new_ref, false);
-        }
-      }
+    mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
+    if (target_space_->HasAddress(ref_ptr->AsMirrorPtr())) {
+      *contains_reference_to_target_space_ = true;
+      callback_(ref_ptr, arg_);
+      DCHECK(!target_space_->HasAddress(ref_ptr->AsMirrorPtr()));
     }
   }
 
  private:
-  MarkObjectCallback* const callback_;
+  MarkHeapReferenceCallback* const callback_;
   space::ContinuousSpace* const target_space_;
   void* const arg_;
   bool* const contains_reference_to_target_space_;
@@ -89,27 +85,27 @@
 
 class RememberedSetObjectVisitor {
  public:
-  RememberedSetObjectVisitor(MarkObjectCallback* callback, space::ContinuousSpace* target_space,
+  RememberedSetObjectVisitor(MarkHeapReferenceCallback* callback,
+                             space::ContinuousSpace* target_space,
                              bool* const contains_reference_to_target_space, void* arg)
       : callback_(callback), target_space_(target_space), arg_(arg),
         contains_reference_to_target_space_(contains_reference_to_target_space) {}
 
   void operator()(mirror::Object* obj) const EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    DCHECK(obj != NULL);
     RememberedSetReferenceVisitor ref_visitor(callback_, target_space_,
                                               contains_reference_to_target_space_, arg_);
-    collector::MarkSweep::VisitObjectReferences<kMovingClasses>(obj, ref_visitor);
+    obj->VisitReferences<kMovingClasses>(ref_visitor);
   }
 
  private:
-  MarkObjectCallback* const callback_;
+  MarkHeapReferenceCallback* const callback_;
   space::ContinuousSpace* const target_space_;
   void* const arg_;
   bool* const contains_reference_to_target_space_;
 };
 
-void RememberedSet::UpdateAndMarkReferences(MarkObjectCallback* callback,
+void RememberedSet::UpdateAndMarkReferences(MarkHeapReferenceCallback* callback,
                                             space::ContinuousSpace* target_space, void* arg) {
   CardTable* card_table = heap_->GetCardTable();
   bool contains_reference_to_target_space = false;
diff --git a/runtime/gc/accounting/remembered_set.h b/runtime/gc/accounting/remembered_set.h
index 92feeb1..4ed20dd 100644
--- a/runtime/gc/accounting/remembered_set.h
+++ b/runtime/gc/accounting/remembered_set.h
@@ -52,7 +52,7 @@
   void ClearCards();
 
   // Mark through all references to the target space.
-  void UpdateAndMarkReferences(MarkObjectCallback* callback,
+  void UpdateAndMarkReferences(MarkHeapReferenceCallback* callback,
                                space::ContinuousSpace* target_space, void* arg)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/gc/collector/mark_sweep-inl.h b/runtime/gc/collector/mark_sweep-inl.h
index f73a50f..1cb2adb 100644
--- a/runtime/gc/collector/mark_sweep-inl.h
+++ b/runtime/gc/collector/mark_sweep-inl.h
@@ -22,137 +22,30 @@
 #include "gc/heap.h"
 #include "mirror/art_field.h"
 #include "mirror/class.h"
-#include "mirror/object_array.h"
+#include "mirror/object_array-inl.h"
+#include "mirror/reference.h"
 
 namespace art {
 namespace gc {
 namespace collector {
 
-template <typename MarkVisitor>
-inline void MarkSweep::ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor) {
+template<typename MarkVisitor, typename ReferenceVisitor>
+inline void MarkSweep::ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor,
+                                       const ReferenceVisitor& ref_visitor) {
   if (kIsDebugBuild && !IsMarked(obj)) {
     heap_->DumpSpaces();
     LOG(FATAL) << "Scanning unmarked object " << obj;
   }
-  // The GetClass verifies the object, don't need to reverify after.
-  mirror::Class* klass = obj->GetClass();
-  // IsArrayClass verifies klass.
-  if (UNLIKELY(klass->IsArrayClass())) {
-    if (kCountScannedTypes) {
-      ++array_count_;
-    }
-    if (klass->IsObjectArrayClass<kVerifyNone>()) {
-      VisitObjectArrayReferences(obj->AsObjectArray<mirror::Object, kVerifyNone>(), visitor);
-    }
-  } else if (UNLIKELY(klass == mirror::Class::GetJavaLangClass())) {
-    if (kCountScannedTypes) {
+  obj->VisitReferences<false>(visitor, ref_visitor);
+  if (kCountScannedTypes) {
+    mirror::Class* klass = obj->GetClass<kVerifyNone>();
+    if (UNLIKELY(klass == mirror::Class::GetJavaLangClass())) {
       ++class_count_;
-    }
-    VisitClassReferences<false>(klass, obj, visitor);
-  } else {
-    if (kCountScannedTypes) {
+    } else if (UNLIKELY(klass->IsArrayClass<kVerifyNone>())) {
+      ++array_count_;
+    } else {
       ++other_count_;
     }
-    VisitInstanceFieldsReferences<false>(klass, obj, visitor);
-    if (UNLIKELY(klass->IsReferenceClass<kVerifyNone>())) {
-      DelayReferenceReferent(klass, obj);
-    }
-  }
-}
-
-template <bool kVisitClass, typename Visitor>
-inline void MarkSweep::VisitObjectReferences(mirror::Object* obj, const Visitor& visitor)
-    SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
-                          Locks::mutator_lock_) {
-  mirror::Class* klass = obj->GetClass();
-  if (klass->IsArrayClass()) {
-    if (kVisitClass) {
-      visitor(obj, klass, mirror::Object::ClassOffset(), false);
-    }
-    if (klass->IsObjectArrayClass<kVerifyNone>()) {
-      VisitObjectArrayReferences(obj->AsObjectArray<mirror::Object, kVerifyNone>(), visitor);
-    }
-  } else if (klass == mirror::Class::GetJavaLangClass()) {
-    DCHECK_EQ(klass->GetClass<kVerifyNone>(), mirror::Class::GetJavaLangClass());
-    VisitClassReferences<kVisitClass>(klass, obj, visitor);
-  } else {
-    VisitInstanceFieldsReferences<kVisitClass>(klass, obj, visitor);
-  }
-}
-
-template <bool kVisitClass, typename Visitor>
-inline void MarkSweep::VisitInstanceFieldsReferences(mirror::Class* klass,
-                                                     mirror::Object* obj, const Visitor& visitor)
-    SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-  VisitFieldsReferences<kVisitClass>(obj, klass->GetReferenceInstanceOffsets<kVerifyNone>(), false,
-                                     visitor);
-}
-
-template <bool kVisitClass, typename Visitor>
-inline void MarkSweep::VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
-                                            const Visitor& visitor)
-    SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-  VisitInstanceFieldsReferences<kVisitClass>(klass, obj, visitor);
-  VisitStaticFieldsReferences<kVisitClass>(obj->AsClass<kVerifyNone>(), visitor);
-}
-
-template <bool kVisitClass, typename Visitor>
-inline void MarkSweep::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
-    SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
-  VisitFieldsReferences<kVisitClass>(klass, klass->GetReferenceStaticOffsets<kVerifyNone>(), true,
-                                     visitor);
-}
-
-template <bool kVisitClass, typename Visitor>
-inline void MarkSweep::VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets,
-                                             bool is_static, const Visitor& visitor) {
-  if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
-    if (!kVisitClass) {
-      // Currently the class bit is always set in the word. Since we count leading zeros to find
-      // the offset and the class bit is at offset 0, it means that the highest bit is the class
-      // bit. We can quickly clear this using xor.
-      ref_offsets ^= kWordHighBitMask;
-      DCHECK_EQ(mirror::Object::ClassOffset().Uint32Value(), 0U);
-    }
-    // Found a reference offset bitmap.  Mark the specified offsets.
-    while (ref_offsets != 0) {
-      size_t right_shift = CLZ(ref_offsets);
-      MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
-      mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyReads>(field_offset, false);
-      visitor(obj, ref, field_offset, is_static);
-      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
-    }
-  } else {
-    // There is no reference offset bitmap.  In the non-static case,
-    // walk up the class inheritance hierarchy and find reference
-    // offsets the hard way. In the static case, just consider this
-    // class.
-    for (mirror::Class* klass = is_static ? obj->AsClass<kVerifyNone>() : obj->GetClass<kVerifyNone>();
-         klass != nullptr;
-         klass = is_static ? nullptr : klass->GetSuperClass()) {
-      size_t num_reference_fields = (is_static
-                                     ? klass->NumReferenceStaticFields()
-                                     : klass->NumReferenceInstanceFields());
-      for (size_t i = 0; i < num_reference_fields; ++i) {
-        mirror::ArtField* field = (is_static ? klass->GetStaticField(i)
-                                             : klass->GetInstanceField(i));
-        MemberOffset field_offset = field->GetOffset();
-        mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyReads>(field_offset, false);
-        visitor(obj, ref, field_offset, is_static);
-      }
-    }
-  }
-}
-
-template <typename Visitor>
-inline void MarkSweep::VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
-                                                  const Visitor& visitor) {
-  const size_t length = static_cast<size_t>(array->GetLength());
-  for (size_t i = 0; i < length; ++i) {
-    mirror::Object* element = array->GetWithoutChecks(static_cast<int32_t>(i));
-    const size_t width = sizeof(mirror::HeapReference<mirror::Object>);
-    MemberOffset offset(i * width + mirror::Array::DataOffset(width).Int32Value());
-    visitor(array, element, offset, false);
   }
 }
 
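The reference-offset bitmap walk deleted above moves into
Object::VisitFieldsReferences (see the object-inl.h hunk later in this patch).
As a standalone illustration of the CLZ walk, a toy program assuming 32-bit
words and 4-byte heap references, with __builtin_clz standing in for ART's
CLZ macro:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Bit 31 maps to offset 0 (the class slot); a leading-zero count of n
      // maps to field offset 4 * n, mirroring CLASS_OFFSET_FROM_CLZ.
      uint32_t ref_offsets = 0x80000000u | (1u << 29) | (1u << 26);
      ref_offsets ^= 0x80000000u;  // kWordHighBitMask: drop the class slot.
      while (ref_offsets != 0) {
        int right_shift = __builtin_clz(ref_offsets);
        std::printf("visit reference field at offset %d\n", 4 * right_shift);
        ref_offsets &= ~(0x80000000u >> right_shift);
      }
      return 0;  // Prints offsets 8 and 20.
    }
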
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index bfef438..8abf5e2 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -279,7 +279,7 @@
       TimingLogger::ScopedSplit split(name, &timings_);
       accounting::ModUnionTable* mod_union_table = heap_->FindModUnionTableFromSpace(space);
       CHECK(mod_union_table != nullptr);
-      mod_union_table->UpdateAndMarkReferences(MarkObjectCallback, this);
+      mod_union_table->UpdateAndMarkReferences(MarkHeapReferenceCallback, this);
     }
   }
 }
@@ -410,6 +410,10 @@
   return obj;
 }
 
+void MarkSweep::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg) {
+  reinterpret_cast<MarkSweep*>(arg)->MarkObject(ref->AsMirrorPtr());
+}
+
 inline void MarkSweep::UnMarkObjectNonNull(const Object* obj) {
   DCHECK(!immune_region_.ContainsObject(obj));
   if (kUseBrooksPointer) {
@@ -612,8 +616,8 @@
   explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE
       : mark_sweep_(mark_sweep) {}
 
-  // TODO: Fixme when anotatalysis works with visitors.
-  void operator()(Object* obj) const ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS {
+  void operator()(Object* obj) const ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
     if (kCheckLocks) {
       Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
       Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
@@ -625,6 +629,21 @@
   MarkSweep* const mark_sweep_;
 };
 
+class DelayReferenceReferentVisitor {
+ public:
+  explicit DelayReferenceReferentVisitor(MarkSweep* collector) : collector_(collector) {
+  }
+
+  void operator()(mirror::Class* klass, mirror::Reference* ref) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
+    collector_->DelayReferenceReferent(klass, ref);
+  }
+
+ private:
+  MarkSweep* const collector_;
+};
+
 template <bool kUseFinger = false>
 class MarkStackTask : public Task {
  public:
@@ -647,27 +666,44 @@
   static const size_t kMaxSize = 1 * KB;
 
  protected:
+  class MarkObjectParallelVisitor {
+   public:
+    explicit MarkObjectParallelVisitor(MarkStackTask<kUseFinger>* chunk_task,
+                                       MarkSweep* mark_sweep) ALWAYS_INLINE
+            : chunk_task_(chunk_task), mark_sweep_(mark_sweep) {}
+
+    void operator()(Object* obj, MemberOffset offset, bool /* static */) const ALWAYS_INLINE
+        SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+      mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
+      if (ref != nullptr && mark_sweep_->MarkObjectParallel(ref)) {
+        if (kUseFinger) {
+          android_memory_barrier();
+          if (reinterpret_cast<uintptr_t>(ref) >=
+              static_cast<uintptr_t>(mark_sweep_->atomic_finger_)) {
+            return;
+          }
+        }
+        chunk_task_->MarkStackPush(ref);
+      }
+    }
+
+   private:
+    MarkStackTask<kUseFinger>* const chunk_task_;
+    MarkSweep* const mark_sweep_;
+  };
+
   class ScanObjectParallelVisitor {
    public:
     explicit ScanObjectParallelVisitor(MarkStackTask<kUseFinger>* chunk_task) ALWAYS_INLINE
         : chunk_task_(chunk_task) {}
 
-    void operator()(Object* obj) const {
-      MarkSweep* mark_sweep = chunk_task_->mark_sweep_;
-      mark_sweep->ScanObjectVisit(obj,
-          [mark_sweep, this](Object* /* obj */, Object* ref, const MemberOffset& /* offset */,
-              bool /* is_static */) ALWAYS_INLINE_LAMBDA {
-        if (ref != nullptr && mark_sweep->MarkObjectParallel(ref)) {
-          if (kUseFinger) {
-            android_memory_barrier();
-            if (reinterpret_cast<uintptr_t>(ref) >=
-                static_cast<uintptr_t>(mark_sweep->atomic_finger_)) {
-              return;
-            }
-          }
-          chunk_task_->MarkStackPush(ref);
-        }
-      });
+    // Multiple threads will use this visitor concurrently.
+    void operator()(Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+        EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
+      MarkSweep* const mark_sweep = chunk_task_->mark_sweep_;
+      MarkObjectParallelVisitor mark_visitor(chunk_task_, mark_sweep);
+      DelayReferenceReferentVisitor ref_visitor(mark_sweep);
+      mark_sweep->ScanObjectVisit(obj, mark_visitor, ref_visitor);
     }
 
    private:
@@ -707,7 +743,8 @@
   }
 
   // Scans all of the objects
-  virtual void Run(Thread* self) {
+  virtual void Run(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
     ScanObjectParallelVisitor visitor(this);
     // TODO: Tune this.
     static const size_t kFifoSize = 4;
@@ -1197,30 +1234,29 @@
   GetHeap()->RecordFree(freed_objects, freed_bytes);
 }
 
-// Process the "referent" field in a java.lang.ref.Reference.  If the
-// referent has not yet been marked, put it on the appropriate list in
-// the heap for later processing.
-void MarkSweep::DelayReferenceReferent(mirror::Class* klass, Object* obj) {
+// Process the "referent" field in a java.lang.ref.Reference.  If the referent has not yet been
+// marked, put it on the appropriate list in the heap for later processing.
+void MarkSweep::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* ref) {
   DCHECK(klass != nullptr);
   if (kCountJavaLangRefs) {
     ++reference_count_;
   }
-  heap_->DelayReferenceReferent(klass, obj->AsReference(), IsMarkedCallback, this);
+  heap_->DelayReferenceReferent(klass, ref, IsMarkedCallback, this);
 }
 
 class MarkObjectVisitor {
  public:
-  explicit MarkObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE : mark_sweep_(mark_sweep) {}
+  explicit MarkObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE : mark_sweep_(mark_sweep) {
+  }
 
-  // TODO: Fixme when anotatalysis works with visitors.
-  void operator()(Object* /* obj */, Object* ref, const MemberOffset& /* offset */,
-                  bool /* is_static */) const ALWAYS_INLINE
-      NO_THREAD_SAFETY_ANALYSIS {
+  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
+      ALWAYS_INLINE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
     if (kCheckLocks) {
       Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
       Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
     }
-    mark_sweep_->MarkObject(ref);
+    mark_sweep_->MarkObject(obj->GetFieldObject<mirror::Object>(offset, false));
   }
 
  private:
@@ -1230,8 +1266,9 @@
 // Scans an object reference.  Determines the type of the reference
 // and dispatches to a specialized scanning routine.
 void MarkSweep::ScanObject(Object* obj) {
-  MarkObjectVisitor visitor(this);
-  ScanObjectVisit(obj, visitor);
+  MarkObjectVisitor mark_visitor(this);
+  DelayReferenceReferentVisitor ref_visitor(this);
+  ScanObjectVisit(obj, mark_visitor, ref_visitor);
 }
 
 void MarkSweep::ProcessMarkStackPausedCallback(void* arg) {
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 59f8e28..84b775a 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -33,6 +33,7 @@
   class Class;
   class Object;
   template<class T> class ObjectArray;
+  class Reference;
 }  // namespace mirror
 
 class StackVisitor;
@@ -162,10 +163,12 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // TODO: enable thread safety analysis when in use by multiple worker threads.
-  template <typename MarkVisitor>
-  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
-      NO_THREAD_SAFETY_ANALYSIS;
+  // Visits obj's reference fields with visitor, and java.lang.ref referents with ref_visitor.
+  template<typename MarkVisitor, typename ReferenceVisitor>
+  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor,
+                       const ReferenceVisitor& ref_visitor)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+    EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
   void SweepSystemWeaks()
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
@@ -180,14 +183,14 @@
   void VerifyIsLive(const mirror::Object* obj)
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
-  template <bool kVisitClass, typename Visitor>
-  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
   static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
+  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+
   static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                                RootType root_type)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
@@ -214,6 +217,10 @@
     return *gc_barrier_;
   }
 
+  // Schedules an unmarked object for reference processing.
+  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+
  protected:
   // Returns true if the object has its bit set in the mark bitmap.
   bool IsMarked(const mirror::Object* object) const;
@@ -269,32 +276,6 @@
   void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
       NO_THREAD_SAFETY_ANALYSIS;
 
-  template <bool kVisitClass, typename Visitor>
-  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
-                                            const Visitor& visitor)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
-  // Visit the header, static field references, and interface pointers of a class object.
-  template <bool kVisitClass, typename Visitor>
-  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
-                                   const Visitor& visitor)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
-  template <bool kVisitClass, typename Visitor>
-  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
-  template <bool kVisitClass, typename Visitor>
-  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
-                                    const Visitor& visitor)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
-  // Visit all of the references in an object array.
-  template <typename Visitor>
-  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
-                                         const Visitor& visitor)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
   // Push a single reference on a mark stack.
   void PushOnMarkStack(mirror::Object* obj);
 
@@ -303,10 +284,6 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // Schedules an unmarked object for reference processing.
-  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
   // Recursively blackens objects on the mark stack.
   void ProcessMarkStack(bool paused)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
@@ -316,17 +293,6 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  void EnqueueFinalizerReferences(mirror::Object** ref)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
-  void PreserveSomeSoftReferences(mirror::Object** ref)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
-  void ClearWhiteReferences(mirror::Object** list)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
   // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
   // IsExclusiveHeld.
   void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index b6591fc..5faa3a1 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -97,18 +97,13 @@
 SemiSpace::SemiSpace(Heap* heap, bool generational, const std::string& name_prefix)
     : GarbageCollector(heap,
                        name_prefix + (name_prefix.empty() ? "" : " ") + "marksweep + semispace"),
-      mark_stack_(nullptr),
-      is_large_object_space_immune_(false),
       to_space_(nullptr),
-      to_space_live_bitmap_(nullptr),
       from_space_(nullptr),
-      self_(nullptr),
       generational_(generational),
       last_gc_to_space_end_(nullptr),
       bytes_promoted_(0),
       whole_heap_collection_(true),
-      whole_heap_collection_interval_counter_(0),
-      saved_bytes_(0) {
+      whole_heap_collection_interval_counter_(0) {
 }
 
 void SemiSpace::InitializePhase() {
@@ -214,7 +209,7 @@
             space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                      "UpdateAndMarkImageModUnionTable",
                                      &timings_);
-        table->UpdateAndMarkReferences(MarkObjectCallback, this);
+        table->UpdateAndMarkReferences(MarkHeapReferenceCallback, this);
       } else if (heap_->FindRememberedSetFromSpace(space) != nullptr) {
         DCHECK(kUseRememberedSet);
         // If a bump pointer space only collection, the non-moving
@@ -246,7 +241,8 @@
 class SemiSpaceScanObjectVisitor {
  public:
   explicit SemiSpaceScanObjectVisitor(SemiSpace* ss) : semi_space_(ss) {}
-  void operator()(Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
+  void operator()(Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
-    // TODO: fix NO_THREAD_SAFETY_ANALYSIS. ScanObject() requires an
-    // exclusive lock on the mutator lock, but
-    // SpaceBitmap::VisitMarkedRange() only requires the shared lock.
@@ -263,22 +259,21 @@
   explicit SemiSpaceVerifyNoFromSpaceReferencesVisitor(space::ContinuousMemMapAllocSpace* from_space) :
       from_space_(from_space) {}
 
-  void operator()(Object* obj, Object* ref, const MemberOffset& offset, bool /* is_static */)
-      const ALWAYS_INLINE {
+  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
+    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
     if (from_space_->HasAddress(ref)) {
       Runtime::Current()->GetHeap()->DumpObject(LOG(INFO), obj);
     }
-    DCHECK(!from_space_->HasAddress(ref));
   }
  private:
   space::ContinuousMemMapAllocSpace* from_space_;
 };
 
 void SemiSpace::VerifyNoFromSpaceReferences(Object* obj) {
-  DCHECK(obj != NULL);
   DCHECK(!from_space_->HasAddress(obj)) << "Scanning object " << obj << " in from space";
   SemiSpaceVerifyNoFromSpaceReferencesVisitor visitor(from_space_);
-  MarkSweep::VisitObjectReferences<kMovingClasses>(obj, visitor);
+  obj->VisitReferences<kMovingClasses>(visitor);
 }
 
 class SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor {
@@ -313,7 +308,7 @@
       accounting::RememberedSet* rem_set = heap_->FindRememberedSetFromSpace(space);
       if (kUseRememberedSet) {
         DCHECK(rem_set != nullptr);
-        rem_set->UpdateAndMarkReferences(MarkObjectCallback, from_space_, this);
+        rem_set->UpdateAndMarkReferences(MarkHeapReferenceCallback, from_space_, this);
         if (kIsDebugBuild) {
           // Verify that there are no from-space references that
           // remain in the space, that is, the remembered set (and the
@@ -475,9 +470,9 @@
   memcpy(dest, src, page_remain);
   byte_src += page_remain;
   byte_dest += page_remain;
-  CHECK_ALIGNED(reinterpret_cast<uintptr_t>(byte_dest), kPageSize);
-  CHECK_ALIGNED(reinterpret_cast<uintptr_t>(byte_dest), sizeof(uintptr_t));
-  CHECK_ALIGNED(reinterpret_cast<uintptr_t>(byte_src), sizeof(uintptr_t));
+  DCHECK_ALIGNED(reinterpret_cast<uintptr_t>(byte_dest), kPageSize);
+  DCHECK_ALIGNED(reinterpret_cast<uintptr_t>(byte_dest), sizeof(uintptr_t));
+  DCHECK_ALIGNED(reinterpret_cast<uintptr_t>(byte_src), sizeof(uintptr_t));
   while (byte_src + kPageSize < limit) {
     bool all_zero = true;
     uintptr_t* word_dest = reinterpret_cast<uintptr_t*>(byte_dest);
@@ -582,17 +577,18 @@
  // Used to mark and copy objects. Any newly-marked objects that are in the from space get moved to
 // the to-space and have their forward address updated. Objects which have been newly marked are
 // pushed on the mark stack.
-Object* SemiSpace::MarkObject(Object* obj) {
+void SemiSpace::MarkObject(mirror::HeapReference<Object>* obj_ptr) {
+  Object* obj = obj_ptr->AsMirrorPtr();
+  if (obj == nullptr) {
+    return;
+  }
   if (kUseBrooksPointer) {
     // Verify all the objects have the correct forward pointer installed.
-    if (obj != nullptr) {
-      obj->AssertSelfBrooksPointer();
-    }
+    obj->AssertSelfBrooksPointer();
   }
-  Object* forward_address = obj;
-  if (obj != nullptr && !immune_region_.ContainsObject(obj)) {
+  if (!immune_region_.ContainsObject(obj)) {
     if (from_space_->HasAddress(obj)) {
-      forward_address = GetForwardingAddressInFromSpace(obj);
+      mirror::Object* forward_address = GetForwardingAddressInFromSpace(obj);
       // If the object has already been moved, return the new forward address.
       if (forward_address == nullptr) {
         forward_address = MarkNonForwardedObject(obj);
@@ -604,9 +600,10 @@
         // Push the object onto the mark stack for later processing.
         MarkStackPush(forward_address);
       }
-      // TODO: Do we need this if in the else statement?
+      obj_ptr->Assign(forward_address);
     } else {
-      accounting::SpaceBitmap* object_bitmap = heap_->GetMarkBitmap()->GetContinuousSpaceBitmap(obj);
+      accounting::SpaceBitmap* object_bitmap =
+          heap_->GetMarkBitmap()->GetContinuousSpaceBitmap(obj);
       if (LIKELY(object_bitmap != nullptr)) {
         if (generational_) {
           // If a bump pointer space only collection, we should not
@@ -615,9 +612,8 @@
           // the non-moving space is added to the immune space.
           DCHECK(whole_heap_collection_);
         }
-        // This object was not previously marked.
-        if (!object_bitmap->Test(obj)) {
-          object_bitmap->Set(obj);
+        if (!object_bitmap->Set(obj)) {
+          // This object was not previously marked.
           MarkStackPush(obj);
         }
       } else {
@@ -628,25 +624,30 @@
       }
     }
   }
-  return forward_address;
 }
 
 void SemiSpace::ProcessMarkStackCallback(void* arg) {
-  DCHECK(arg != nullptr);
   reinterpret_cast<SemiSpace*>(arg)->ProcessMarkStack();
 }
 
 mirror::Object* SemiSpace::MarkObjectCallback(mirror::Object* root, void* arg) {
-  DCHECK(root != nullptr);
-  DCHECK(arg != nullptr);
-  return reinterpret_cast<SemiSpace*>(arg)->MarkObject(root);
+  auto ref = mirror::HeapReference<mirror::Object>::FromMirrorPtr(root);
+  reinterpret_cast<SemiSpace*>(arg)->MarkObject(&ref);
+  return ref.AsMirrorPtr();
+}
+
+void SemiSpace::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr,
+                                          void* arg) {
+  reinterpret_cast<SemiSpace*>(arg)->MarkObject(obj_ptr);
 }
 
 void SemiSpace::MarkRootCallback(Object** root, void* arg, uint32_t /*thread_id*/,
                                  RootType /*root_type*/) {
-  DCHECK(root != nullptr);
-  DCHECK(arg != nullptr);
-  *root = reinterpret_cast<SemiSpace*>(arg)->MarkObject(*root);
+  auto ref = mirror::HeapReference<mirror::Object>::FromMirrorPtr(*root);
+  reinterpret_cast<SemiSpace*>(arg)->MarkObject(&ref);
+  if (*root != ref.AsMirrorPtr()) {
+    *root = ref.AsMirrorPtr();
+  }
 }
 
 // Marks all objects in the root set.
@@ -708,42 +709,35 @@
 
 // Process the "referent" field in a java.lang.ref.Reference.  If the referent has not yet been
 // marked, put it on the appropriate list in the heap for later processing.
-void SemiSpace::DelayReferenceReferent(mirror::Class* klass, Object* obj) {
-  heap_->DelayReferenceReferent(klass, obj->AsReference(), MarkedForwardingAddressCallback, this);
+void SemiSpace::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
+  heap_->DelayReferenceReferent(klass, reference, MarkedForwardingAddressCallback, this);
 }
 
 class SemiSpaceMarkObjectVisitor {
  public:
-  explicit SemiSpaceMarkObjectVisitor(SemiSpace* semi_space) : semi_space_(semi_space) {
+  explicit SemiSpaceMarkObjectVisitor(SemiSpace* collector) : collector_(collector) {
   }
 
-  void operator()(Object* obj, Object* ref, const MemberOffset& offset, bool /* is_static */)
-      const ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS /* EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) */ {
-    mirror::Object* new_address = semi_space_->MarkObject(ref);
-    if (new_address != ref) {
-      DCHECK(new_address != nullptr);
-      // Don't need to mark the card since we updating the object address and not changing the
-      // actual objects its pointing to. Using SetFieldObjectWithoutWriteBarrier is better in this
-      // case since it does not dirty cards and use additional memory.
-      // Since we do not change the actual object, we can safely use non-transactional mode. Also
-      // disable check as we could run inside a transaction.
-      obj->SetFieldObjectWithoutWriteBarrier<false, false, kVerifyNone>(offset, new_address, false);
-    }
+  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const ALWAYS_INLINE
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
+    collector_->MarkObject(obj->GetFieldObjectReferenceAddr(offset));
   }
+
+  void operator()(mirror::Class* klass, mirror::Reference* ref) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
+    collector_->DelayReferenceReferent(klass, ref);
+  }
+
  private:
-  SemiSpace* const semi_space_;
+  SemiSpace* const collector_;
 };
 
 // Visit all of the references of an object and update.
 void SemiSpace::ScanObject(Object* obj) {
-  DCHECK(obj != NULL);
   DCHECK(!from_space_->HasAddress(obj)) << "Scanning object " << obj << " in from space";
   SemiSpaceMarkObjectVisitor visitor(this);
-  MarkSweep::VisitObjectReferences<kMovingClasses>(obj, visitor);
-  mirror::Class* klass = obj->GetClass<kVerifyNone>();
-  if (UNLIKELY(klass->IsReferenceClass<kVerifyNone>())) {
-    DelayReferenceReferent(klass, obj);
-  }
+  obj->VisitReferences<kMovingClasses>(visitor, visitor);
 }
 
 // Scan anything that's on the mark stack.
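
A note on the root-marking adapters above: a root is a raw Object**, so it is
boxed into a temporary HeapReference to reuse the slot-updating MarkObject,
and the write-back is guarded so an unmoved root does not dirty its page.
Condensed from MarkRootCallback:

    auto ref = mirror::HeapReference<mirror::Object>::FromMirrorPtr(*root);
    reinterpret_cast<SemiSpace*>(arg)->MarkObject(&ref);  // May Assign() a forwarding address.
    if (*root != ref.AsMirrorPtr()) {
      *root = ref.AsMirrorPtr();  // Write back only if the object actually moved.
    }
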
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index 0d77f29..523c2ab 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -98,11 +98,13 @@
   void FindDefaultMarkBitmap();
 
   // Returns the new address of the object.
-  mirror::Object* MarkObject(mirror::Object* object)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+  void MarkObject(mirror::HeapReference<mirror::Object>* obj_ptr)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   void ScanObject(mirror::Object* obj)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   void VerifyNoFromSpaceReferences(mirror::Object* obj)
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
@@ -138,11 +140,19 @@
   static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
 
+  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+
   static void ProcessMarkStackCallback(void* arg)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
 
   virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  // Schedules an unmarked object for reference processing.
+  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
+      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
 
  protected:
   // Returns null if the object is not marked, otherwise returns the forwarding address (same as
@@ -158,7 +168,8 @@
   // Marks or unmarks a large object based on whether or not set is true. If set is true, then we
   // mark, otherwise we unmark.
   bool MarkLargeObject(const mirror::Object* obj)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // Expand mark stack to 2x its current size.
   void ResizeMarkStack(size_t new_size);
@@ -173,10 +184,6 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // Schedules an unmarked object for reference processing.
-  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
-      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
-
   // Recursively blackens objects on the mark stack.
   void ProcessMarkStack()
       EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 11c0f71..6c3ae5e 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1018,20 +1018,20 @@
     }
     if (search_allocation_stack) {
       if (sorted) {
-        if (allocation_stack_->ContainsSorted(const_cast<mirror::Object*>(obj))) {
+        if (allocation_stack_->ContainsSorted(obj)) {
           return true;
         }
-      } else if (allocation_stack_->Contains(const_cast<mirror::Object*>(obj))) {
+      } else if (allocation_stack_->Contains(obj)) {
         return true;
       }
     }
 
     if (search_live_stack) {
       if (sorted) {
-        if (live_stack_->ContainsSorted(const_cast<mirror::Object*>(obj))) {
+        if (live_stack_->ContainsSorted(obj)) {
           return true;
         }
-      } else if (live_stack_->Contains(const_cast<mirror::Object*>(obj))) {
+      } else if (live_stack_->Contains(obj)) {
         return true;
       }
     }
@@ -1303,14 +1303,15 @@
   // TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for
   // annotalysis on visitors.
   void operator()(mirror::Object* o) const NO_THREAD_SAFETY_ANALYSIS {
-    collector::MarkSweep::VisitObjectReferences<true>(o, *this);
+    o->VisitReferences<true>(*this);
   }
 
-  // For MarkSweep::VisitObjectReferences.
+  // For Object::VisitReferences.
-  void operator()(mirror::Object* referrer, mirror::Object* object,
-                  const MemberOffset&, bool) const {
-    if (object == object_ && (max_count_ == 0 || referring_objects_.size() < max_count_)) {
-      referring_objects_.push_back(referrer);
+  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
+    if (ref == object_ && (max_count_ == 0 || referring_objects_.size() < max_count_)) {
+      referring_objects_.push_back(obj);
     }
   }
 
@@ -1910,10 +1911,18 @@
     return failed_;
   }
 
-  // TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for smarter
-  // analysis on visitors.
-  void operator()(mirror::Object* obj, mirror::Object* ref,
-                  const MemberOffset& offset, bool /* is_static */) const
+  void operator()(mirror::Class* klass, mirror::Reference* ref) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    this->operator()(ref, mirror::Reference::ReferentOffset(), false);
+  }
+
+  void operator()(mirror::Object* obj, MemberOffset offset, bool /* static */) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    this->operator()(obj, obj->GetFieldObject<mirror::Object>(offset, false), offset);
+  }
+
+  // TODO: Fix the NO_THREAD_SAFETY_ANALYSIS below once annotalysis supports visitors.
+  void operator()(mirror::Object* obj, mirror::Object* ref, MemberOffset offset) const
       NO_THREAD_SAFETY_ANALYSIS {
     if (ref == nullptr || IsLive(ref)) {
       // Verify that the reference is live.
@@ -2014,7 +2023,7 @@
   static void VerifyRoots(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
                           RootType /*root_type*/) {
     VerifyReferenceVisitor* visitor = reinterpret_cast<VerifyReferenceVisitor*>(arg);
-    (*visitor)(nullptr, *root, MemberOffset(0), true);
+    (*visitor)(nullptr, *root, MemberOffset(0));
   }
 
  private:
@@ -2033,11 +2042,7 @@
     // be live or else how did we find it in the live bitmap?
     VerifyReferenceVisitor visitor(heap_);
    // The class doesn't count as a reference but we should verify it anyway.
-    collector::MarkSweep::VisitObjectReferences<true>(obj, visitor);
-    if (obj->IsReferenceInstance()) {
-      mirror::Reference* ref = obj->AsReference();
-      visitor(obj, ref->GetReferent(), mirror::Reference::ReferentOffset(), false);
-    }
+    obj->VisitReferences<true>(visitor, visitor);
     failed_ = failed_ || visitor.Failed();
   }
 
@@ -2102,11 +2107,12 @@
 
   // TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for
   // annotalysis on visitors.
-  void operator()(mirror::Object* obj, mirror::Object* ref, const MemberOffset& offset,
-                  bool is_static) const NO_THREAD_SAFETY_ANALYSIS {
+  void operator()(mirror::Object* obj, MemberOffset offset, bool is_static) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
     // Filter out class references since changing an object's class does not mark the card as dirty.
     // Also handles large objects, since the only reference they hold is a class reference.
-    if (ref != NULL && !ref->IsClass()) {
+    if (ref != nullptr && !ref->IsClass()) {
       accounting::CardTable* card_table = heap_->GetCardTable();
       // If the object is not dirty and it is referencing something in the live stack other than
       // class, then it must be on a dirty card.
@@ -2118,8 +2124,8 @@
         // Card should be either kCardDirty if it got re-dirtied after we aged it, or
        // kCardDirty - 1 if it didn't get touched since we aged it.
         accounting::ObjectStack* live_stack = heap_->live_stack_.get();
-        if (live_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
-          if (live_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
+        if (live_stack->ContainsSorted(ref)) {
+          if (live_stack->ContainsSorted(obj)) {
             LOG(ERROR) << "Object " << obj << " found in live stack";
           }
           if (heap_->GetLiveBitmap()->Test(obj)) {
@@ -2173,7 +2179,7 @@
   void operator()(mirror::Object* obj) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
     VerifyReferenceCardVisitor visitor(heap_, const_cast<bool*>(&failed_));
-    collector::MarkSweep::VisitObjectReferences<true>(obj, visitor);
+    obj->VisitReferences<true>(visitor);
   }
 
   bool Failed() const {
@@ -2282,8 +2288,7 @@
   }
 }
 
-static mirror::Object* IdentityMarkObjectCallback(mirror::Object* obj, void*) {
-  return obj;
+static void IdentityMarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>*, void*) {
 }
 
 void Heap::PreGcVerification(collector::GarbageCollector* gc) {
@@ -2321,7 +2326,7 @@
     ReaderMutexLock reader_lock(self, *Locks::heap_bitmap_lock_);
     for (const auto& table_pair : mod_union_tables_) {
       accounting::ModUnionTable* mod_union_table = table_pair.second;
-      mod_union_table->UpdateAndMarkReferences(IdentityMarkObjectCallback, nullptr);
+      mod_union_table->UpdateAndMarkReferences(IdentityMarkHeapReferenceCallback, nullptr);
       mod_union_table->Verify();
     }
     thread_list->ResumeAll();
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index e3f4eed..89d9241 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -465,6 +465,12 @@
   return Alloc<true>(self, Runtime::Current()->GetHeap()->GetCurrentNonMovingAllocator());
 }
 
+template <bool kVisitClass, typename Visitor>
+inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor) {
+  VisitInstanceFieldsReferences<kVisitClass>(klass, visitor);
+  VisitStaticFieldsReferences<kVisitClass>(this, visitor);
+}
+
 }  // namespace mirror
 }  // namespace art
 
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 76ab94c..ddc07ff 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -861,6 +861,10 @@
   // When class is verified, set the kAccPreverified flag on each method.
   void SetPreverifiedFlagOnAllMethods() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
+  template <bool kVisitClass, typename Visitor>
+  void VisitReferences(mirror::Class* klass, const Visitor& visitor)
+      NO_THREAD_SAFETY_ANALYSIS;
+
  private:
   void SetVerifyErrorClass(Class* klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
diff --git a/runtime/mirror/object-inl.h b/runtime/mirror/object-inl.h
index cad1017..281d4ec 100644
--- a/runtime/mirror/object-inl.h
+++ b/runtime/mirror/object-inl.h
@@ -557,6 +557,77 @@
   return success;
 }
 
+template<bool kVisitClass, bool kIsStatic, typename Visitor>
+inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
+  if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
+    if (!kVisitClass) {
+      // Mask out the class (always the high bit, since the class field is at offset 0).
+      ref_offsets ^= kWordHighBitMask;
+    }
+    DCHECK_EQ(ClassOffset().Uint32Value(), 0U);
+    // Found a reference offset bitmap. Visit the specified offsets.
+    while (ref_offsets != 0) {
+      size_t right_shift = CLZ(ref_offsets);
+      MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
+      visitor(this, field_offset, kIsStatic);
+      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
+    }
+  } else {
+    // There is no reference offset bitmap.  In the non-static case, walk up the class
+    // inheritance hierarchy and find reference offsets the hard way. In the static case, just
+    // consider this class.
+    for (mirror::Class* klass = kIsStatic ? AsClass() : GetClass(); klass != nullptr;
+        klass = kIsStatic ? nullptr : klass->GetSuperClass()) {
+      size_t num_reference_fields =
+          kIsStatic ? klass->NumReferenceStaticFields() : klass->NumReferenceInstanceFields();
+      for (size_t i = 0; i < num_reference_fields; ++i) {
+        mirror::ArtField* field = kIsStatic ? klass->GetStaticField(i)
+            : klass->GetInstanceField(i);
+        MemberOffset field_offset = field->GetOffset();
+        // TODO: Do a simpler check?
+        if (!kVisitClass && UNLIKELY(field_offset.Uint32Value() == ClassOffset().Uint32Value())) {
+          continue;
+        }
+        visitor(this, field_offset, kIsStatic);
+      }
+    }
+  }
+}
+
+template<bool kVisitClass, typename Visitor>
+inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
+  VisitFieldsReferences<kVisitClass, false>(
+      klass->GetReferenceInstanceOffsets<kVerifyNone>(), visitor);
+}
+
+template<bool kVisitClass, typename Visitor>
+inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
+  klass->VisitFieldsReferences<kVisitClass, true>(
+      klass->GetReferenceStaticOffsets<kVerifyNone>(), visitor);
+}
+
+template <const bool kVisitClass, VerifyObjectFlags kVerifyFlags, typename Visitor,
+    typename JavaLangRefVisitor>
+inline void Object::VisitReferences(const Visitor& visitor,
+                                    const JavaLangRefVisitor& ref_visitor) {
+  mirror::Class* klass = GetClass<kVerifyFlags>();
+  if (UNLIKELY(klass == Class::GetJavaLangClass())) {
+    DCHECK_EQ(klass->GetClass(), Class::GetJavaLangClass());
+    AsClass<kVerifyNone>()->VisitReferences<kVisitClass>(klass, visitor);
+  } else if (UNLIKELY(klass->IsArrayClass<kVerifyFlags>())) {
+    if (klass->IsObjectArrayClass<kVerifyNone>()) {
+      AsObjectArray<mirror::Object>()->VisitReferences<kVisitClass>(visitor);
+    } else if (kVisitClass) {
+      visitor(this, ClassOffset(), false);
+    }
+  } else {
+    VisitFieldsReferences<kVisitClass, false>(klass->GetReferenceInstanceOffsets(), visitor);
+    if (UNLIKELY(klass->IsReferenceClass())) {
+      ref_visitor(klass, AsReference());
+    }
+  }
+}
+
 }  // namespace mirror
 }  // namespace art
 
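A hedged usage sketch of the new entry point (the functor name is
hypothetical; the signature and template parameters are those declared in
object.h below):

    // Logs each outgoing reference of one object, including its class since
    // kVisitClass is true. The default JavaLangRefVisitor (VoidFunctor)
    // ignores java.lang.ref referents.
    class DumpReferencesVisitor {
     public:
      void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
          SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
        mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
        LOG(INFO) << obj << " field at offset " << offset.Uint32Value() << " references " << ref;
      }
    };

    DumpReferencesVisitor visitor;
    obj->VisitReferences<true>(visitor);
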
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index 476259f..0a77828 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -240,6 +240,14 @@
 #endif
   }
 
+  // TODO: Fix the thread safety analysis, which is broken by the use of templates. This should
+  // be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_).
+  template <const bool kVisitClass, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
+      typename Visitor, typename JavaLangRefVisitor = VoidFunctor>
+  void VisitReferences(const Visitor& visitor,
+                       const JavaLangRefVisitor& ref_visitor = VoidFunctor())
+      NO_THREAD_SAFETY_ANALYSIS;
+
  protected:
   // Accessors for non-Java type fields
   template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
@@ -251,6 +259,17 @@
 #endif
   }
 
+  // TODO: Fix this when annotalysis works with visitors.
+  template<bool kVisitClass, bool kIsStatic, typename Visitor>
+  void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor)
+      NO_THREAD_SAFETY_ANALYSIS;
+  template<bool kVisitClass, typename Visitor>
+  void VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+  template<bool kVisitClass, typename Visitor>
+  void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
  private:
   // Verify the type correctness of stores to fields.
   void CheckFieldAssignmentImpl(MemberOffset field_offset, Object* new_value)
diff --git a/runtime/mirror/object_array-inl.h b/runtime/mirror/object_array-inl.h
index a427957..8032cc3 100644
--- a/runtime/mirror/object_array-inl.h
+++ b/runtime/mirror/object_array-inl.h
@@ -233,6 +233,17 @@
                       (i * sizeof(HeapReference<Object>)));
 }
 
+template<class T> template<const bool kVisitClass, typename Visitor>
+void ObjectArray<T>::VisitReferences(const Visitor& visitor) {
+  if (kVisitClass) {
+    visitor(this, ClassOffset(), false);
+  }
+  const size_t length = static_cast<size_t>(GetLength());
+  for (size_t i = 0; i < length; ++i) {
+    visitor(this, OffsetOfElement(i), false);
+  }
+}
+
 }  // namespace mirror
 }  // namespace art
 
diff --git a/runtime/mirror/object_array.h b/runtime/mirror/object_array.h
index 7f9e716..5ff0490 100644
--- a/runtime/mirror/object_array.h
+++ b/runtime/mirror/object_array.h
@@ -78,6 +78,11 @@
   ObjectArray<T>* CopyOf(Thread* self, int32_t new_length)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
+  // TODO: Fix the thread safety analysis, which is broken by the use of templates. This should
+  // be SHARED_LOCKS_REQUIRED(Locks::mutator_lock_).
+  template<const bool kVisitClass, typename Visitor>
+  void VisitReferences(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS;
+
  private:
   static MemberOffset OffsetOfElement(int32_t i);
 
diff --git a/runtime/object_callbacks.h b/runtime/object_callbacks.h
index 468ba08..89ee34e 100644
--- a/runtime/object_callbacks.h
+++ b/runtime/object_callbacks.h
@@ -25,6 +25,7 @@
 namespace art {
 namespace mirror {
 class Object;
+template<class MirrorType> class HeapReference;
 }  // namespace mirror
 class StackVisitor;
 
@@ -56,6 +57,9 @@
 // A callback for verifying roots.
 typedef void (VerifyRootCallback)(const mirror::Object* root, void* arg, size_t vreg,
     const StackVisitor* visitor);
+
+typedef void (MarkHeapReferenceCallback)(mirror::HeapReference<mirror::Object>* ref, void* arg);
+
 // A callback for testing if an object is marked, returns nullptr if not marked, otherwise the new
 // address the object (if the object didn't move, returns the object input parameter).
 typedef mirror::Object* (IsMarkedCallback)(mirror::Object* object, void* arg)
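
For contrast, the two implementations of the new callback added by this patch,
side by side: the non-moving collector only reads through the slot, while the
moving collector may also write a forwarding address back through it.

    void MarkSweep::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg) {
      reinterpret_cast<MarkSweep*>(arg)->MarkObject(ref->AsMirrorPtr());  // Read-only use of the slot.
    }

    void SemiSpace::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg) {
      reinterpret_cast<SemiSpace*>(arg)->MarkObject(obj_ptr);  // MarkObject may Assign() through obj_ptr.
    }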