Add read barriers for the weak roots in the JNI weak globals.

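Template IndirectReferenceTable::Get() and SynchronizedGet() on
ReadBarrierOption so that decoding a JNI weak global goes through
ReadBarrier::BarrierForWeakRoot(), while local and strong global
references (which are strong roots) skip the barrier. The GC's sweep
of the weak globals keeps reading the slots directly, and
IndirectReferenceTable::Dump() conservatively enables the barrier
since it is only used for debugging.

A rough sketch of the resulting call pattern (illustrative only; the
variables below stand in for the corresponding runtime fields):

  // Weak globals are weak roots: decode them with the read barrier.
  mirror::Object* weak = weak_globals.Get<kWithReadBarrier>(ref);

  // Locals and strong globals are strong roots: skip the barrier.
  mirror::Object* local = locals.Get<kWithoutReadBarrier>(ref);
  mirror::Object* global = globals.SynchronizedGet<kWithoutReadBarrier>(
      self, &globals_lock, ref);
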
Bug: 12687968
Change-Id: Ic265a0e162e8cc9edc4ab7fa34f8afd5ce968d08
diff --git a/runtime/indirect_reference_table-inl.h b/runtime/indirect_reference_table-inl.h
index 42a9757..790f4d0 100644
--- a/runtime/indirect_reference_table-inl.h
+++ b/runtime/indirect_reference_table-inl.h
@@ -71,12 +71,16 @@
   return true;
 }
 
+template<ReadBarrierOption kReadBarrierOption>
 inline mirror::Object* IndirectReferenceTable::Get(IndirectRef iref) const {
   if (!GetChecked(iref)) {
     return kInvalidIndirectRefObject;
   }
-  mirror::Object* obj = table_[ExtractIndex(iref)];
+  mirror::Object** root = &table_[ExtractIndex(iref)];
+  mirror::Object* obj = *root;
   if (LIKELY(obj != kClearedJniWeakGlobal)) {
+    // The read barrier or VerifyObject won't handle kClearedJniWeakGlobal.
+    obj = ReadBarrier::BarrierForWeakRoot<mirror::Object, kReadBarrierOption>(root);
     VerifyObject(obj);
   }
   return obj;
diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc
index 432481b..756ac96 100644
--- a/runtime/indirect_reference_table.cc
+++ b/runtime/indirect_reference_table.cc
@@ -266,11 +266,22 @@
 
 void IndirectReferenceTable::Dump(std::ostream& os) const {
   os << kind_ << " table dump:\n";
-  ReferenceTable::Table entries(table_, table_ + Capacity());
-  // Remove NULLs.
-  for (int i = entries.size() - 1; i >= 0; --i) {
-    if (entries[i] == NULL) {
-      entries.erase(entries.begin() + i);
+  ReferenceTable::Table entries;
+  for (size_t i = 0; i < Capacity(); ++i) {
+    mirror::Object** root = &table_[i];
+    mirror::Object* obj = *root;
+    if (UNLIKELY(obj == nullptr)) {
+      // Skip null entries; they are not included in the dump.
+    } else if (UNLIKELY(obj == kClearedJniWeakGlobal)) {
+      // ReferenceTable::Dump() will handle kClearedJniWeakGlobal
+      // while the read barrier won't.
+      entries.push_back(obj);
+    } else {
+      // We need a read barrier if this is a table of weak globals. Since
+      // this is only used for debugging, where performance isn't the top
+      // priority, we unconditionally enable the read barrier, which is conservative.
+      obj = ReadBarrier::BarrierForWeakRoot<mirror::Object, kWithReadBarrier>(root);
+      entries.push_back(obj);
     }
   }
   ReferenceTable::Dump(os, entries);
diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h
index 5015410..5b3ed68 100644
--- a/runtime/indirect_reference_table.h
+++ b/runtime/indirect_reference_table.h
@@ -263,14 +263,16 @@
    *
    * Returns kInvalidIndirectRefObject if iref is invalid.
    */
+  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   mirror::Object* Get(IndirectRef iref) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
       ALWAYS_INLINE;
 
   // Synchronized get which reads a reference, acquiring a lock if necessary.
+  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   mirror::Object* SynchronizedGet(Thread* /*self*/, ReaderWriterMutex* /*mutex*/,
                                   IndirectRef iref) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return Get(iref);
+    return Get<kReadBarrierOption>(iref);
   }
 
   /*
@@ -366,7 +368,9 @@
   std::unique_ptr<MemMap> table_mem_map_;
   // Mem map where we store the extended debugging info.
   std::unique_ptr<MemMap> slot_mem_map_;
-  /* bottom of the stack */
+  // Bottom of the stack. If this is a table of JNI weak global
+  // references, do not access the object references in it directly, as
+  // they are weak roots. Use Get(), which applies the read barrier.
   mirror::Object** table_;
   /* bit mask, ORed into all irefs */
   IndirectRefKind kind_;
diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc
index b51e1d5..a660183 100644
--- a/runtime/jni_internal.cc
+++ b/runtime/jni_internal.cc
@@ -2441,7 +2441,9 @@
     switch (kind) {
     case kLocal: {
       ScopedObjectAccess soa(env);
-      if (static_cast<JNIEnvExt*>(env)->locals.Get(ref) != kInvalidIndirectRefObject) {
+      // The local refs don't need a read barrier.
+      if (static_cast<JNIEnvExt*>(env)->locals.Get<kWithoutReadBarrier>(ref) !=
+          kInvalidIndirectRefObject) {
         return JNILocalRefType;
       }
       return JNIInvalidRefType;
@@ -3118,7 +3120,9 @@
   while (UNLIKELY(!allow_new_weak_globals_)) {
     weak_globals_add_condition_.WaitHoldingLocks(self);
   }
-  return weak_globals_.Get(ref);
+  // The weak globals do need a read barrier as they are weak roots.
+  mirror::Object* obj = weak_globals_.Get<kWithReadBarrier>(ref);
+  return obj;
 }
 
 void JavaVMExt::DumpReferenceTables(std::ostream& os) {
@@ -3298,6 +3302,7 @@
 void JavaVMExt::SweepJniWeakGlobals(IsMarkedCallback* callback, void* arg) {
   MutexLock mu(Thread::Current(), weak_globals_lock_);
   for (mirror::Object** entry : weak_globals_) {
+    // Since this is called by the GC, we don't need a read barrier.
     mirror::Object* obj = *entry;
     mirror::Object* new_obj = callback(obj, arg);
     if (new_obj == nullptr) {
diff --git a/runtime/jni_internal.h b/runtime/jni_internal.h
index 7e76e11..4072da4 100644
--- a/runtime/jni_internal.h
+++ b/runtime/jni_internal.h
@@ -129,6 +129,9 @@
   // TODO: Make the other members of this class also private.
   // JNI weak global references.
   Mutex weak_globals_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
+  // Since weak_globals_ contains weak roots, be careful not to access
+  // the object references in it directly. Use Get() with the read
+  // barrier enabled.
   IndirectReferenceTable weak_globals_ GUARDED_BY(weak_globals_lock_);
   bool allow_new_weak_globals_ GUARDED_BY(weak_globals_lock_);
   ConditionVariable weak_globals_add_condition_ GUARDED_BY(weak_globals_lock_);
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 55bec1e..1355aa1 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1253,7 +1253,8 @@
   // The "kinds" below are sorted by the frequency we expect to encounter them.
   if (kind == kLocal) {
     IndirectReferenceTable& locals = tlsPtr_.jni_env->locals;
-    result = locals.Get(ref);
+    // Local references do not need a read barrier.
+    result = locals.Get<kWithoutReadBarrier>(ref);
   } else if (kind == kHandleScopeOrInvalid) {
     // TODO: make stack indirect reference table lookup more efficient.
     // Check if this is a local reference in the handle scope.
@@ -1266,7 +1267,9 @@
     }
   } else if (kind == kGlobal) {
     JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
-    result = vm->globals.SynchronizedGet(const_cast<Thread*>(this), &vm->globals_lock, ref);
+    // Strong global references do not need a read barrier.
+    result = vm->globals.SynchronizedGet<kWithoutReadBarrier>(
+        const_cast<Thread*>(this), &vm->globals_lock, ref);
   } else {
     DCHECK_EQ(kind, kWeakGlobal);
     result = Runtime::Current()->GetJavaVM()->DecodeWeakGlobal(const_cast<Thread*>(this), ref);