Mark breakpoint roots

Visit the method of each installed breakpoint as a GC root, so that a class cannot be unloaded while one of its methods has a breakpoint set.
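
In outline, GC root marking now also visits the method of every installed
breakpoint, keeping that method (and hence its declaring class) reachable for
as long as the breakpoint exists. A condensed, annotated sketch of the hook
added below (names taken from this change; illustrative rather than exact):

  // Invoked during root marking (see the concurrent_copying.cc and
  // runtime.cc hunks below).
  void Dbg::VisitRoots(RootVisitor* visitor) {
    // Hold the breakpoint lock so gBreakpoints cannot change while we walk it.
    ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
    // Batch root reports to reduce per-root visitor overhead.
    BufferedRootVisitor<128> root_visitor(visitor, RootInfo(kRootVMInternal));
    for (Breakpoint& breakpoint : gBreakpoints) {
      // Report the breakpoint's method as a VM-internal root.
      breakpoint.Method()->VisitRoots(root_visitor, sizeof(void*));
    }
  }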

Bug: 22720414
Change-Id: I9aee8bcbfdf253607e89dfc55a50ba3f11d99206
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index b19381d..a4f95b6 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -612,7 +612,7 @@
         // Since we're going to disable deoptimization, we clear the deoptimization requests queue.
         // This prevents us from having any pending deoptimization request when the debugger attaches
         // to us again while no event has been requested yet.
-        MutexLock mu(Thread::Current(), *Locks::deoptimization_lock_);
+        MutexLock mu(self, *Locks::deoptimization_lock_);
         deoptimization_requests_.clear();
         full_deoptimization_event_count_ = 0U;
       }
@@ -5043,4 +5043,13 @@
   method_ = soa.EncodeMethod(m);
 }
 
+void Dbg::VisitRoots(RootVisitor* visitor) {
+  // Visit breakpoint roots, used to prevent unloading of methods with breakpoints.
+  ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
+  BufferedRootVisitor<128> root_visitor(visitor, RootInfo(kRootVMInternal));
+  for (Breakpoint& breakpoint : gBreakpoints) {
+    breakpoint.Method()->VisitRoots(root_visitor, sizeof(void*));
+  }
+}
+
 }  // namespace art
diff --git a/runtime/debugger.h b/runtime/debugger.h
index b3617e4..e908304 100644
--- a/runtime/debugger.h
+++ b/runtime/debugger.h
@@ -646,6 +646,7 @@
   static void DdmSendChunkV(uint32_t type, const iovec* iov, int iov_count)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
+  // Visit breakpoint roots, used to prevent unloading of methods with breakpoints.
   static void VisitRoots(RootVisitor* visitor)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 0a7a69f..d2d12af 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -18,6 +18,7 @@
 
 #include "art_field-inl.h"
 #include "base/stl_util.h"
+#include "debugger.h"
 #include "gc/accounting/heap_bitmap-inl.h"
 #include "gc/accounting/space_bitmap-inl.h"
 #include "gc/reference_processor.h"
@@ -385,6 +386,10 @@
     TimingLogger::ScopedTiming split5("VisitNonThreadRoots", GetTimings());
     Runtime::Current()->VisitNonThreadRoots(this);
   }
+  {
+    TimingLogger::ScopedTiming split6("Dbg::VisitRoots", GetTimings());
+    Dbg::VisitRoots(this);
+  }
   Runtime::Current()->GetHeap()->VisitAllocationRecords(this);
 
   // Immune spaces.
@@ -401,7 +406,7 @@
 
   Thread* self = Thread::Current();
   {
-    TimingLogger::ScopedTiming split6("ProcessMarkStack", GetTimings());
+    TimingLogger::ScopedTiming split7("ProcessMarkStack", GetTimings());
     // We transition through three mark stack modes (thread-local, shared, GC-exclusive). The
     // primary reasons are the fact that we need to use a checkpoint to process thread-local mark
     // stacks, but after we disable weak refs accesses, we can't use a checkpoint due to a deadlock
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 1f447d0..9fb21a8 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1426,6 +1426,7 @@
     // Guaranteed to have no new roots in the constant roots.
     VisitConstantRoots(visitor);
   }
+  Dbg::VisitRoots(visitor);
 }
 
 void Runtime::VisitTransactionRoots(RootVisitor* visitor) {