Add read barriers for the GC roots in Instrumentation.
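
Instrumentation caches the methods deoptimized by the debugger and
reports them to the GC as roots. With a moving collector those methods
can be relocated, so this change:

- Replaces the std::set keyed by ArtMethod* (an address, which is not
  stable under a moving GC) with a std::multimap keyed by the method's
  identity hash code, which is stable.
- Routes every read of a cached pointer through
  ReadBarrier::BarrierForRoot() via small helpers
  (Add/Find/Remove/BeginDeoptimizedMethod), e.g.:

    mirror::ArtMethod** root = &it->second;
    mirror::ArtMethod* m = ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);

- Lets VisitRoots() update the map entries in place instead of
  rebuilding the container, since the identity-hash key remains valid
  when a method moves.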
Bug: 12687968
Change-Id: I324e2f950ce4500b0e00722044af3a9c82487b23
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 4cf4c09..bebc0c0 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -3048,7 +3048,7 @@
// Sanity checks all existing breakpoints on the same method.
static void SanityCheckExistingBreakpoints(mirror::ArtMethod* m, bool need_full_deoptimization)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::breakpoint_lock_) {
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::breakpoint_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (kIsDebugBuild) {
for (const Breakpoint& breakpoint : gBreakpoints) {
CHECK_EQ(need_full_deoptimization, breakpoint.NeedFullDeoptimization());
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index f4eaa61..8e375cf 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -519,7 +519,7 @@
bool empty;
{
ReaderMutexLock mu(self, deoptimized_methods_lock_);
- empty = deoptimized_methods_.empty(); // Avoid lock violation.
+ empty = IsDeoptimizedMethodsEmpty(); // Avoid lock violation.
}
if (empty) {
instrumentation_stubs_installed_ = false;
@@ -580,7 +580,7 @@
}

void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
- const void* portable_code, bool have_portable_code) const {
+ const void* portable_code, bool have_portable_code) {
const void* new_portable_code;
const void* new_quick_code;
bool new_have_portable_code;
@@ -617,20 +617,77 @@
UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}
+bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
+ // Note that the insert() below is not read barrier-aware: without the
+ // FindDeoptimizedMethod() check first, we could end up storing the same
+ // method twice in the map (once as a from-space reference and once as a
+ // to-space one).
+ if (FindDeoptimizedMethod(method)) {
+ // Already in the map. Return.
+ return false;
+ }
+ // Not found. Add it.
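+ // Key by identity hash code rather than by address: the hash code stays
+ // stable even if a moving GC relocates the method.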
+ int32_t hash_code = method->IdentityHashCode();
+ deoptimized_methods_.insert(std::make_pair(hash_code, method));
+ return true;
+}
+
+bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
+ int32_t hash_code = method->IdentityHashCode();
+ auto range = deoptimized_methods_.equal_range(hash_code);
+ for (auto it = range.first; it != range.second; ++it) {
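+ // The map may still hold a from-space reference; read-barrier it so the
+ // comparison below is against the up-to-date to-space pointer.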
+ mirror::ArtMethod** root = &it->second;
+ mirror::ArtMethod* m = ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);
+ if (m == method) {
+ // Found.
+ return true;
+ }
+ }
+ // Not found.
+ return false;
+}
+
+mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
+ auto it = deoptimized_methods_.begin();
+ if (it == deoptimized_methods_.end()) {
+ // Empty.
+ return nullptr;
+ }
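+ // Hand back the first entry through the read barrier so the caller sees a
+ // to-space reference.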
+ mirror::ArtMethod** root = &it->second;
+ return ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);
+}
+
+bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
+ int32_t hash_code = method->IdentityHashCode();
+ auto range = deoptimized_methods_.equal_range(hash_code);
+ for (auto it = range.first; it != range.second; ++it) {
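+ // As in FindDeoptimizedMethod(), compare through the read barrier so a
+ // stale from-space pointer in the map still matches.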
+ mirror::ArtMethod** root = &it->second;
+ mirror::ArtMethod* m = ReadBarrier::BarrierForRoot<mirror::ArtMethod>(root);
+ if (m == method) {
+ // Found. Erase and return.
+ deoptimized_methods_.erase(it);
+ return true;
+ }
+ }
+ // Not found.
+ return false;
+}
+
+bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
+ return deoptimized_methods_.empty();
+}
+
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
CHECK(!method->IsNative());
CHECK(!method->IsProxyMethod());
CHECK(!method->IsAbstract());
Thread* self = Thread::Current();
- std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
{
WriterMutexLock mu(self, deoptimized_methods_lock_);
- pair = deoptimized_methods_.insert(method);
+ bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
+ CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
}
- bool already_deoptimized = !pair.second;
- CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
-
if (!interpreter_stubs_installed_) {
UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
false);
@@ -652,11 +709,10 @@
bool empty;
{
WriterMutexLock mu(self, deoptimized_methods_lock_);
- auto it = deoptimized_methods_.find(method);
- CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
+ bool found_and_erased = RemoveDeoptimizedMethod(method);
+ CHECK(found_and_erased) << "Method " << PrettyMethod(method)
<< " is not deoptimized";
- deoptimized_methods_.erase(it);
- empty = deoptimized_methods_.empty();
+ empty = IsDeoptimizedMethodsEmpty();
}
// Restore code and possibly stack only if we did not deoptimize everything.
@@ -684,15 +740,15 @@
}
}

-bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
- ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
+bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
DCHECK(method != nullptr);
- return deoptimized_methods_.find(method) != deoptimized_methods_.end();
+ ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
+ return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
- CHECK(deoptimized_methods_.empty());
+ CHECK(IsDeoptimizedMethodsEmpty());
CHECK_EQ(deoptimization_enabled_, false);
deoptimization_enabled_ = true;
}
@@ -708,10 +764,11 @@
mirror::ArtMethod* method;
{
ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
- if (deoptimized_methods_.empty()) {
+ if (IsDeoptimizedMethodsEmpty()) {
break;
}
- method = *deoptimized_methods_.begin();
+ method = BeginDeoptimizedMethod();
+ CHECK(method != nullptr);
}
Undeoptimize(method);
}
@@ -963,16 +1020,13 @@
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
- if (deoptimized_methods_.empty()) {
+ if (IsDeoptimizedMethodsEmpty()) {
return;
}
- std::set<mirror::ArtMethod*> new_deoptimized_methods;
- for (mirror::ArtMethod* method : deoptimized_methods_) {
- DCHECK(method != nullptr);
- callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
- new_deoptimized_methods.insert(method);
+ for (auto& pair : deoptimized_methods_) {
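+ // The callback may update the root in place; iterating by reference makes
+ // the new pointer land in the map entry rather than in a local copy.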
+ mirror::ArtMethod** root = &pair.second;
+ callback(reinterpret_cast<mirror::Object**>(root), arg, 0, kRootVMInternal);
}
- deoptimized_methods_ = new_deoptimized_methods;
}

std::string InstrumentationStackFrame::Dump() const {
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index d0cb4de..cabb0e9 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -18,8 +18,8 @@
#define ART_RUNTIME_INSTRUMENTATION_H_
#include <stdint.h>
-#include <set>
#include <list>
+#include <map>
#include "atomic.h"
#include "instruction_set.h"
@@ -162,7 +162,9 @@
LOCKS_EXCLUDED(Locks::thread_list_lock_, deoptimized_methods_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
- bool IsDeoptimized(mirror::ArtMethod* method) const LOCKS_EXCLUDED(deoptimized_methods_lock_);
+ bool IsDeoptimized(mirror::ArtMethod* method)
+ LOCKS_EXCLUDED(deoptimized_methods_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Enable method tracing by installing instrumentation entry/exit stubs.
void EnableMethodTracing()
@@ -186,7 +188,7 @@
// Update the code of a method respecting any installed stubs.
void UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
- const void* portable_code, bool have_portable_code) const
+ const void* portable_code, bool have_portable_code)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Get the quick code for the given method. More efficient than asking the class linker as it
@@ -367,6 +369,23 @@
mirror::ArtField* field, const JValue& field_value) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

+ // Read barrier-aware utility functions for accessing deoptimized_methods_.
+ bool AddDeoptimizedMethod(mirror::ArtMethod* method)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
+ bool FindDeoptimizedMethod(mirror::ArtMethod* method)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
+ bool RemoveDeoptimizedMethod(mirror::ArtMethod* method)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ EXCLUSIVE_LOCKS_REQUIRED(deoptimized_methods_lock_);
+ mirror::ArtMethod* BeginDeoptimizedMethod()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
+ bool IsDeoptimizedMethodsEmpty() const
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ SHARED_LOCKS_REQUIRED(deoptimized_methods_lock_);
+
// Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
bool instrumentation_stubs_installed_;
@@ -421,7 +440,7 @@
- // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
- // only.
+ // The methods being deoptimized (by the debugger) which must be executed with the interpreter
+ // only, keyed by each method's identity hash code, which is stable across a moving GC.
mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
- std::set<mirror::ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
+ std::multimap<int32_t, mirror::ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
bool deoptimization_enabled_;
// Current interpreter handler table. This is updated each time the thread state flags are
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 284e4ff..6a5fe75 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -21,6 +21,7 @@
#include <stdio.h>
#include <iosfwd>
+#include <set>
#include <string>
#include <utility>
#include <vector>