Revert "Revert "Revert "Revert "Basic obsolete methods support""""

A GetDeclaringClass()->GetDexCache() call was inserted during the
merge/review process, meaning that in some situations we would access
the incorrect dex cache from obsolete methods.
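
The offending pattern and its replacement (simplified from the hunks
below; ArtMethod::GetDexCache() knows how to locate an obsolete
method's dex cache through its declaring class's ClassExt data):

  // Wrong: after redefinition the declaring class points at the new
  // dex cache, not the one the obsolete method was compiled against.
  GetInterfaceMethodIfProxy(pointer_size)->GetDeclaringClass()->GetDexCache()
  // Right: GetDexCache() handles obsolete methods itself.
  GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()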

Also, when tracing was enabled we would loop forever (or at least until
an OOM error) in test 916, because tracing forces InterpretOnly mode,
meaning methods would never be jitted.

Bug: 32369913
Bug: 33630159

Test: ART_TEST_TRACE=true \
      ART_TEST_JIT=true   \
      ART_TEST_INTERPRETER=true mma -j40 test-art-host

This reverts commit f6abcda293b115a9d7d8a26376ea2dcf2d1dc510.

Change-Id: I0773bfcba52e3cd51a83be815c6a50c189558f48
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index ef03bb3..96976d9 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -134,8 +134,7 @@
   // NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
   // without accessing the DexCache and we don't want to do that in release build.
   DCHECK_LT(method_index,
-            GetInterfaceMethodIfProxy(pointer_size)->GetDeclaringClass()
-                ->GetDexCache()->NumResolvedMethods());
+            GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
   ArtMethod* method = mirror::DexCache::GetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
                                                           method_index,
                                                           pointer_size);
@@ -154,8 +153,7 @@
   // NOTE: Unchecked, i.e. not throwing AIOOB. We don't even know the length here
   // without accessing the DexCache and we don't want to do that in release build.
   DCHECK_LT(method_index,
-            GetInterfaceMethodIfProxy(pointer_size)->GetDeclaringClass()
-                ->GetDexCache()->NumResolvedMethods());
+            GetInterfaceMethodIfProxy(pointer_size)->GetDexCache()->NumResolvedMethods());
   DCHECK(new_method == nullptr || new_method->GetDeclaringClass() != nullptr);
   mirror::DexCache::SetElementPtrSize(GetDexCacheResolvedMethods(pointer_size),
                                       method_index,
@@ -186,8 +184,7 @@
 inline mirror::Class* ArtMethod::GetDexCacheResolvedType(dex::TypeIndex type_index,
                                                          PointerSize pointer_size) {
   if (kWithCheck) {
-    mirror::DexCache* dex_cache =
-        GetInterfaceMethodIfProxy(pointer_size)->GetDeclaringClass()->GetDexCache();
+    mirror::DexCache* dex_cache = GetInterfaceMethodIfProxy(pointer_size)->GetDexCache();
     if (UNLIKELY(type_index.index_ >= dex_cache->NumResolvedTypes())) {
       ThrowArrayIndexOutOfBoundsException(type_index.index_, dex_cache->NumResolvedTypes());
       return nullptr;
@@ -333,7 +330,7 @@
 }
 
 inline const DexFile::CodeItem* ArtMethod::GetCodeItem() {
-  return GetDeclaringClass()->GetDexFile().GetCodeItem(GetCodeItemOffset());
+  return GetDexFile()->GetCodeItem(GetCodeItemOffset());
 }
 
 inline bool ArtMethod::IsResolvedTypeIdx(dex::TypeIndex type_idx, PointerSize pointer_size) {
@@ -398,11 +395,11 @@
 }
 
 inline mirror::DexCache* ArtMethod::GetDexCache() {
-  DCHECK(!IsProxyMethod());
-  if (UNLIKELY(IsObsolete())) {
-    return GetObsoleteDexCache();
-  } else {
+  if (LIKELY(!IsObsolete())) {
     return GetDeclaringClass()->GetDexCache();
+  } else {
+    DCHECK(!IsProxyMethod());
+    return GetObsoleteDexCache();
   }
 }
 
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 3bc6f5d..963a541 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -27,6 +27,7 @@
 #include "invoke_type.h"
 #include "method_reference.h"
 #include "modifiers.h"
+#include "mirror/dex_cache.h"
 #include "mirror/object.h"
 #include "obj_ptr.h"
 #include "read_barrier_option.h"
@@ -220,6 +221,12 @@
     return !IsIntrinsic() && (GetAccessFlags() & kAccObsoleteMethod) != 0;
   }
 
+  void SetIsObsolete() {
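+    // Obsolete methods locate their dex cache through their declaring class's ClassExt data
+    // (see ArtMethod::GetObsoleteDexCache) rather than through the declaring class itself.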
+    // TODO We should really support redefining intrinsics if possible.
+    DCHECK(!IsIntrinsic());
+    SetAccessFlags(GetAccessFlags() | kAccObsoleteMethod);
+  }
+
   template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   bool IsNative() {
     return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
@@ -326,6 +333,7 @@
   ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
                                                      PointerSize pointer_size)
       REQUIRES_SHARED(Locks::mutator_lock_);
+
   ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
                                                ArtMethod* new_method,
                                                PointerSize pointer_size)
diff --git a/runtime/class_linker-inl.h b/runtime/class_linker-inl.h
index a11257f..71b490e 100644
--- a/runtime/class_linker-inl.h
+++ b/runtime/class_linker-inl.h
@@ -192,20 +192,15 @@
   return dex_cache->GetResolvedField(field_idx, image_pointer_size_);
 }
 
-inline ArtField* ClassLinker::GetResolvedField(uint32_t field_idx,
-                                               ObjPtr<mirror::Class> field_declaring_class) {
-  return GetResolvedField(field_idx, MakeObjPtr(field_declaring_class->GetDexCache()));
-}
-
 inline ArtField* ClassLinker::ResolveField(uint32_t field_idx,
                                            ArtMethod* referrer,
                                            bool is_static) {
   Thread::PoisonObjectPointersIfDebug();
   ObjPtr<mirror::Class> declaring_class = referrer->GetDeclaringClass();
-  ArtField* resolved_field = GetResolvedField(field_idx, declaring_class);
+  ArtField* resolved_field = GetResolvedField(field_idx, referrer->GetDexCache());
   if (UNLIKELY(resolved_field == nullptr)) {
     StackHandleScope<2> hs(Thread::Current());
-    Handle<mirror::DexCache> dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
+    Handle<mirror::DexCache> dex_cache(hs.NewHandle(referrer->GetDexCache()));
     Handle<mirror::ClassLoader> class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
     const DexFile& dex_file = *dex_cache->GetDexFile();
     resolved_field = ResolveField(dex_file, field_idx, dex_cache, class_loader, is_static);
diff --git a/runtime/class_linker.h b/runtime/class_linker.h
index 9b25303..6ef882a 100644
--- a/runtime/class_linker.h
+++ b/runtime/class_linker.h
@@ -327,8 +327,6 @@
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!Locks::dex_lock_, !Roles::uninterruptible_);
 
-  ArtField* GetResolvedField(uint32_t field_idx, ObjPtr<mirror::Class> field_declaring_class)
-      REQUIRES_SHARED(Locks::mutator_lock_);
   ArtField* GetResolvedField(uint32_t field_idx, ObjPtr<mirror::DexCache> dex_cache)
       REQUIRES_SHARED(Locks::mutator_lock_);
   ArtField* ResolveField(uint32_t field_idx, ArtMethod* referrer, bool is_static)
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index f6eeffc..14c9c21 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -563,7 +563,7 @@
       HandleWrapperObjPtr<mirror::Object> h_this(hs2.NewHandleWrapper(this_object));
       Handle<mirror::Class> h_referring_class(hs2.NewHandle(referrer->GetDeclaringClass()));
       const dex::TypeIndex method_type_idx =
-          h_referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
+          referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
       mirror::Class* method_reference_class = class_linker->ResolveType(method_type_idx, referrer);
       if (UNLIKELY(method_reference_class == nullptr)) {
         // Bad type idx.
@@ -673,8 +673,7 @@
                                size_t expected_size) {
   ScopedAssertNoThreadSuspension ants(__FUNCTION__);
   ArtField* resolved_field =
-      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx,
-                                                                     kRuntimePointerSize);
+      referrer->GetDexCache()->GetResolvedField(field_idx, kRuntimePointerSize);
   if (UNLIKELY(resolved_field == nullptr)) {
     return nullptr;
   }
@@ -733,7 +732,7 @@
   }
   mirror::Class* referring_class = referrer->GetDeclaringClass();
   ArtMethod* resolved_method =
-      referring_class->GetDexCache()->GetResolvedMethod(method_idx, kRuntimePointerSize);
+      referrer->GetDexCache()->GetResolvedMethod(method_idx, kRuntimePointerSize);
   if (UNLIKELY(resolved_method == nullptr)) {
     return nullptr;
   }
@@ -759,9 +758,9 @@
   } else if (type == kSuper) {
     // TODO This lookup is rather slow.
     dex::TypeIndex method_type_idx =
-        referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
+        referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
     mirror::Class* method_reference_class =
-        referring_class->GetDexCache()->GetResolvedType(method_type_idx);
+        referrer->GetDexCache()->GetResolvedType(method_type_idx);
     if (method_reference_class == nullptr) {
       // Need to do full type resolution...
       return nullptr;
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index 03ef962..4ea1130 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -557,8 +557,10 @@
 }
 
 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
-  if (interpreter_stubs_installed_) {
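+  // Interpreter stubs being installed no longer implies the full interpreter level: without
+  // interpret_only_ we are only at kInstrumentWithInterpreterAndJit and methods may later be
+  // jitted again.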
+  if (interpreter_stubs_installed_ && interpret_only_) {
     return InstrumentationLevel::kInstrumentWithInterpreter;
+  } else if (interpreter_stubs_installed_) {
+    return InstrumentationLevel::kInstrumentWithInterpreterAndJit;
   } else if (entry_exit_stubs_installed_) {
     return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
   } else {
@@ -566,6 +568,14 @@
   }
 }
 
+bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
+  // We need to reinstall instrumentation if we go to a different level, or if the new level is
+  // kInstrumentWithInterpreterAndJit: since that level does not force all code to always use the
+  // interpreter, optimized code might have started running again and must be deoptimized anew.
+  return new_level == InstrumentationLevel::kInstrumentWithInterpreterAndJit ||
+      GetCurrentInstrumentationLevel() != new_level;
+}
+
 void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
   // Store the instrumentation level for this key or remove it.
   if (desired_level == InstrumentationLevel::kInstrumentNothing) {
@@ -585,8 +595,7 @@
   interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                     forced_interpret_only_;
 
-  InstrumentationLevel current_level = GetCurrentInstrumentationLevel();
-  if (requested_level == current_level) {
+  if (!RequiresInstrumentationInstallation(requested_level)) {
     // We're already set.
     return;
   }
@@ -595,7 +604,7 @@
   Locks::mutator_lock_->AssertExclusiveHeld(self);
   Locks::thread_list_lock_->AssertNotHeld(self);
   if (requested_level > InstrumentationLevel::kInstrumentNothing) {
-    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
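+    // Both interpreter-based levels need interpreter stubs; the enum is ordered so that '>='
+    // covers kInstrumentWithInterpreterAndJit as well as kInstrumentWithInterpreter.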
+    if (requested_level >= InstrumentationLevel::kInstrumentWithInterpreterAndJit) {
       interpreter_stubs_installed_ = true;
       entry_exit_stubs_installed_ = true;
     } else {
@@ -842,7 +851,8 @@
 void Instrumentation::DisableDeoptimization(const char* key) {
   CHECK_EQ(deoptimization_enabled_, true);
   // If we deoptimized everything, undo it.
-  if (interpreter_stubs_installed_) {
+  InstrumentationLevel level = GetCurrentInstrumentationLevel();
+  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
     UndeoptimizeEverything(key);
   }
   // Undeoptimized selected methods.
@@ -869,6 +879,14 @@
   return !deoptimization_enabled_ && !interpreter_stubs_installed_;
 }
 
+// TODO We don't check deoptimization_enabled_ because there currently isn't really any support
+// for multiple users of instrumentation. Since this is just a temporary state pending work to
+// ensure that the current method doesn't get kept across suspend points, this should be okay.
+// TODO Remove once b/33630159 is resolved.
+void Instrumentation::ReJitEverything(const char* key) {
+  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreterAndJit);
+}
+
 void Instrumentation::DeoptimizeEverything(const char* key) {
   CHECK(deoptimization_enabled_);
   ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 1e5fcf2..05c0aaa 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -133,6 +133,9 @@
   enum class InstrumentationLevel {
     kInstrumentNothing,                   // execute without instrumentation
     kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
+    kInstrumentWithInterpreterAndJit,     // execute with interpreter initially and later the JIT
+                                          // (if it is enabled). This level is special in that it
+                                          // always requires re-instrumentation.
     kInstrumentWithInterpreter            // execute with interpreter
   };
 
@@ -163,6 +166,13 @@
   }
   bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Executes everything with the interpreter and, later, the JIT (if available).
+  void ReJitEverything(const char* key)
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!Locks::thread_list_lock_,
+               !Locks::classlinker_classes_lock_,
+               !deoptimized_methods_lock_);
+
   // Executes everything with interpreter.
   void DeoptimizeEverything(const char* key)
       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
@@ -432,9 +442,13 @@
     return alloc_entrypoints_instrumented_;
   }
 
- private:
   InstrumentationLevel GetCurrentInstrumentationLevel() const;
 
+ private:
+  // Returns true if moving to the given instrumentation level requires the installation of stubs.
+  // False otherwise.
+  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;
+
   // Does the job of installing or removing instrumentation code within methods.
   // In order to support multiple clients using instrumentation at the same time,
   // the caller must pass a unique key (a string) identifying it so we remind which
diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h
index 423f054..b599949 100644
--- a/runtime/interpreter/interpreter_common.h
+++ b/runtime/interpreter/interpreter_common.h
@@ -251,17 +251,16 @@
     }
   }
   ArtMethod* method = shadow_frame.GetMethod();
-  ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
   // MethodVerifier refuses methods with string_idx out of bounds.
   DCHECK_LT(string_idx.index_ % mirror::DexCache::kDexCacheStringCacheSize,
-            declaring_class->GetDexFile().NumStringIds());
+            method->GetDexFile()->NumStringIds());
   ObjPtr<mirror::String> string_ptr =
-      mirror::StringDexCachePair::Lookup(declaring_class->GetDexCache()->GetStrings(),
+      mirror::StringDexCachePair::Lookup(method->GetDexCache()->GetStrings(),
                                          string_idx.index_,
                                          mirror::DexCache::kDexCacheStringCacheSize).Read();
   if (UNLIKELY(string_ptr == nullptr)) {
     StackHandleScope<1> hs(self);
-    Handle<mirror::DexCache> dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
+    Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));
     string_ptr = Runtime::Current()->GetClassLinker()->ResolveString(*method->GetDexFile(),
                                                                      string_idx,
                                                                      dex_cache);
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index f43e30d..6336cdd 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -594,6 +594,9 @@
       VLOG(jit) << "JIT discarded jitted code due to invalid single-implementation assumptions.";
       return nullptr;
     }
+    DCHECK(cha_single_implementation_list.empty() || !Runtime::Current()->IsDebuggable())
+        << "Should not be using cha on debuggable apps/runs!";
+
     for (ArtMethod* single_impl : cha_single_implementation_list) {
       Runtime::Current()->GetClassHierarchyAnalysis()->AddDependency(
           single_impl, method, method_header);
@@ -645,6 +648,69 @@
   return CodeCacheSizeLocked();
 }
 
+// This notifies the code cache that the given method has been redefined and that it should remove
+// any cached information it has on the method. All threads must be suspended before calling this
+// method. The compiled code for the method (if there is any) must not be in any thread's call stack.
+void JitCodeCache::NotifyMethodRedefined(ArtMethod* method) {
+  MutexLock mu(Thread::Current(), lock_);
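+  // Native methods are never jitted into this cache and have no profiling info, so there is
+  // nothing to remove for them.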
+  if (method->IsNative()) {
+    return;
+  }
+  ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
+  if (info != nullptr) {
+    auto profile = std::find(profiling_infos_.begin(), profiling_infos_.end(), info);
+    DCHECK(profile != profiling_infos_.end());
+    profiling_infos_.erase(profile);
+  }
+  method->SetProfilingInfo(nullptr);
+  ScopedCodeCacheWrite ccw(code_map_.get());
+  // Use the iterator returned by erase() so we never advance an invalidated iterator.
+  for (auto code_iter = method_code_map_.begin(); code_iter != method_code_map_.end();) {
+    if (code_iter->second == method) {
+      FreeCode(code_iter->first);
+      code_iter = method_code_map_.erase(code_iter);
+    } else {
+      ++code_iter;
+    }
+  }
+  auto code_map = osr_code_map_.find(method);
+  if (code_map != osr_code_map_.end()) {
+    osr_code_map_.erase(code_map);
+  }
+}
+
+// This invalidates old_method. Once this function returns one can no longer use old_method to
+// execute code unless it is fixed up. This fixup will happen later in the process of installing a
+// class redefinition.
+// TODO We should add some info to ArtMethod to note that 'old_method' has been invalidated and
+// shouldn't be used since it is no longer logically in the jit code cache.
+// TODO We should add DCHECKS that validate that the JIT is paused when this method is entered.
+void JitCodeCache::MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method) {
+  MutexLock mu(Thread::Current(), lock_);
+  // Update ProfilingInfo to the new one and remove it from the old_method.
+  if (old_method->GetProfilingInfo(kRuntimePointerSize) != nullptr) {
+    DCHECK_EQ(old_method->GetProfilingInfo(kRuntimePointerSize)->GetMethod(), old_method);
+    ProfilingInfo* info = old_method->GetProfilingInfo(kRuntimePointerSize);
+    old_method->SetProfilingInfo(nullptr);
+    // Since the JIT should be paused and all threads suspended by the time this is called these
+    // checks should always pass.
+    DCHECK(!info->IsInUseByCompiler());
+    new_method->SetProfilingInfo(info);
+    info->method_ = new_method;
+  }
+  // Update method_code_map_ to point to the new method.
+  for (auto& it : method_code_map_) {
+    if (it.second == old_method) {
+      it.second = new_method;
+    }
+  }
+  // Update osr_code_map_ to point to the new method.
+  auto code_map = osr_code_map_.find(old_method);
+  if (code_map != osr_code_map_.end()) {
+    osr_code_map_.Put(new_method, code_map->second);
+    osr_code_map_.erase(old_method);
+  }
+}
+
 size_t JitCodeCache::CodeCacheSizeLocked() {
   return used_memory_for_code_;
 }
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index d97742d..b5e3176 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -75,6 +75,10 @@
       REQUIRES_SHARED(Locks::mutator_lock_)
       REQUIRES(!lock_);
 
+  void NotifyMethodRedefined(ArtMethod* method)
+      REQUIRES(Locks::mutator_lock_)
+      REQUIRES(!lock_);
+
   // Notify to the code cache that the compiler wants to use the
   // profiling info of `method` to drive optimizations,
   // and therefore ensure the returned profiling info object is not
@@ -219,6 +223,11 @@
   void DisallowInlineCacheAccess() REQUIRES(!lock_);
   void BroadcastForInlineCacheAccess() REQUIRES(!lock_);
 
+  // Notify the code cache that the method at the pointer 'old_method' is being moved to the pointer
+  // 'new_method' since it is being made obsolete.
+  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method)
+      REQUIRES(!lock_) REQUIRES(Locks::mutator_lock_);
+
  private:
   // Take ownership of maps.
   JitCodeCache(MemMap* code_map,
diff --git a/runtime/jit/profiling_info.h b/runtime/jit/profiling_info.h
index 9902bb5..9fbf2e3 100644
--- a/runtime/jit/profiling_info.h
+++ b/runtime/jit/profiling_info.h
@@ -128,7 +128,9 @@
   const uint32_t number_of_inline_caches_;
 
   // Method this profiling info is for.
-  ArtMethod* const method_;
+  // Not 'const' as JVMTI introduces obsolete methods that we implement by creating new ArtMethods.
+  // See JitCodeCache::MoveObsoleteMethod.
+  ArtMethod* method_;
 
   // Whether the ArtMethod is currently being compiled. This flag
   // is implicitly guarded by the JIT code cache lock.
diff --git a/runtime/mirror/dex_cache.h b/runtime/mirror/dex_cache.h
index ec265e5..6f88cc5 100644
--- a/runtime/mirror/dex_cache.h
+++ b/runtime/mirror/dex_cache.h
@@ -19,7 +19,6 @@
 
 #include "array.h"
 #include "art_field.h"
-#include "art_method.h"
 #include "class.h"
 #include "dex_file_types.h"
 #include "object.h"
@@ -27,6 +26,7 @@
 
 namespace art {
 
+class ArtMethod;
 struct DexCacheOffsets;
 class DexFile;
 class ImageWriter;
diff --git a/runtime/openjdkjvmti/ti_redefine.cc b/runtime/openjdkjvmti/ti_redefine.cc
index 68815e7..57cc938 100644
--- a/runtime/openjdkjvmti/ti_redefine.cc
+++ b/runtime/openjdkjvmti/ti_redefine.cc
@@ -40,6 +40,8 @@
 #include "events-inl.h"
 #include "gc/allocation_listener.h"
 #include "instrumentation.h"
+#include "jit/jit.h"
+#include "jit/jit_code_cache.h"
 #include "jni_env_ext-inl.h"
 #include "jvmti_allocator.h"
 #include "mirror/class.h"
@@ -53,6 +55,143 @@
 
 using android::base::StringPrintf;
 
+// This visitor walks thread stacks and allocates and sets up the obsolete methods. It also
+// performs some basic sanity checks on each obsolete method.
+class ObsoleteMethodStackVisitor : public art::StackVisitor {
+ protected:
+  ObsoleteMethodStackVisitor(
+      art::Thread* thread,
+      art::LinearAlloc* allocator,
+      const std::unordered_set<art::ArtMethod*>& obsoleted_methods,
+      /*out*/std::unordered_map<art::ArtMethod*, art::ArtMethod*>* obsolete_maps,
+      /*out*/bool* success,
+      /*out*/std::string* error_msg)
+        : StackVisitor(thread,
+                       /*context*/nullptr,
+                       StackVisitor::StackWalkKind::kIncludeInlinedFrames),
+          allocator_(allocator),
+          obsoleted_methods_(obsoleted_methods),
+          obsolete_maps_(obsolete_maps),
+          success_(success),
+          is_runtime_frame_(false),
+          error_msg_(error_msg) {
+    *success_ = true;
+  }
+
+  ~ObsoleteMethodStackVisitor() OVERRIDE {}
+
+ public:
+  // Returns true if we successfully installed obsolete methods on this thread, filling
+  // obsolete_maps_ with the translations if needed. Returns false and fills error_msg if we fail.
+  // The stack is cleaned up when we fail.
+  static bool UpdateObsoleteFrames(
+      art::Thread* thread,
+      art::LinearAlloc* allocator,
+      const std::unordered_set<art::ArtMethod*>& obsoleted_methods,
+      /*out*/std::unordered_map<art::ArtMethod*, art::ArtMethod*>* obsolete_maps,
+      /*out*/std::string* error_msg) REQUIRES(art::Locks::mutator_lock_) {
+    bool success = true;
+    ObsoleteMethodStackVisitor visitor(thread,
+                                       allocator,
+                                       obsoleted_methods,
+                                       obsolete_maps,
+                                       &success,
+                                       error_msg);
+    visitor.WalkStack();
+    if (!success) {
+      RestoreFrames(thread, *obsolete_maps, error_msg);
+      return false;
+    } else {
+      return true;
+    }
+  }
+
+  static void RestoreFrames(
+      art::Thread* thread ATTRIBUTE_UNUSED,
+      const std::unordered_map<art::ArtMethod*, art::ArtMethod*>& obsolete_maps ATTRIBUTE_UNUSED,
+      std::string* error_msg)
+        REQUIRES(art::Locks::mutator_lock_) {
+    LOG(FATAL) << "Restoring stack frames is not yet supported. Error was: " << *error_msg;
+  }
+
+  bool VisitFrame() OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
+    art::ArtMethod* old_method = GetMethod();
+    // TODO REMOVE once either current_method doesn't stick around through suspend points or deopt
+    // works through runtime methods.
+    bool prev_was_runtime_frame = is_runtime_frame_;
+    is_runtime_frame_ = old_method->IsRuntimeMethod();
+    if (obsoleted_methods_.find(old_method) != obsoleted_methods_.end()) {
+      // The check below works since when we deoptimize we set shadow frames for all frames until a
+      // native/runtime transition and for those set the return PC to a function that will complete
+      // the deoptimization. This does leave us with the unfortunate side-effect that frames just
+      // below runtime frames cannot be deoptimized at the moment.
+      // TODO REMOVE once either current_method doesn't stick around through suspend points or deopt
+      // works through runtime methods.
+      // TODO b/33616143
+      if (!IsShadowFrame() && prev_was_runtime_frame) {
+        *error_msg_ = StringPrintf("Deoptimization failed due to runtime method in stack.");
+        *success_ = false;
+        return false;
+      }
+      // We cannot ensure that the right dex file is used in inlined frames so we don't support
+      // redefining them.
+      DCHECK(!IsInInlinedFrame()) << "Inlined frames are not supported when using redefinition";
+      // TODO We should really support redefining intrinsics (and intrinsic obsolete methods).
+      // We don't support intrinsics yet, so check for them here.
+      DCHECK(!old_method->IsIntrinsic());
+      art::ArtMethod* new_obsolete_method = nullptr;
+      auto obsolete_method_pair = obsolete_maps_->find(old_method);
+      if (obsolete_method_pair == obsolete_maps_->end()) {
+        // Create a new Obsolete Method and put it in the list.
+        art::Runtime* runtime = art::Runtime::Current();
+        art::ClassLinker* cl = runtime->GetClassLinker();
+        auto ptr_size = cl->GetImagePointerSize();
+        const size_t method_size = art::ArtMethod::Size(ptr_size);
+        auto* method_storage = allocator_->Alloc(GetThread(), method_size);
+        if (method_storage == nullptr) {
+          *success_ = false;
+          *error_msg_ = StringPrintf("Unable to allocate storage for obsolete version of '%s'",
+                                     old_method->PrettyMethod().c_str());
+          return false;
+        }
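+        // Construct the obsolete method in the raw storage and copy every field (entry points,
+        // access flags, dex method index, ...) from the original before flagging it obsolete.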
+        new_obsolete_method = new (method_storage) art::ArtMethod();
+        new_obsolete_method->CopyFrom(old_method, ptr_size);
+        DCHECK_EQ(new_obsolete_method->GetDeclaringClass(), old_method->GetDeclaringClass());
+        new_obsolete_method->SetIsObsolete();
+        obsolete_maps_->insert({old_method, new_obsolete_method});
+        // Update JIT Data structures to point to the new method.
+        art::jit::Jit* jit = art::Runtime::Current()->GetJit();
+        if (jit != nullptr) {
+          // Notify the JIT that we are making this method obsolete. It will update its internal
+          // structures to keep track of the new obsolete method.
+          jit->GetCodeCache()->MoveObsoleteMethod(old_method, new_obsolete_method);
+        }
+      } else {
+        new_obsolete_method = obsolete_method_pair->second;
+      }
+      DCHECK(new_obsolete_method != nullptr);
+      SetMethod(new_obsolete_method);
+    }
+    return true;
+  }
+
+ private:
+  // The linear allocator we should use to make new methods.
+  art::LinearAlloc* allocator_;
+  // The set of all methods which could be obsoleted.
+  const std::unordered_set<art::ArtMethod*>& obsoleted_methods_;
+  // A map from the original to the newly allocated obsolete method for frames on this thread. The
+  // values in this map must be added to the obsolete_methods_ (and obsolete_dex_caches_) fields of
+  // the redefined class's ClassExt by the caller.
+  std::unordered_map<art::ArtMethod*, art::ArtMethod*>* obsolete_maps_;
+  bool* success_;
+  // TODO REMOVE once either current_method doesn't stick around through suspend points or deopt
+  // works through runtime methods.
+  bool is_runtime_frame_;
+  std::string* error_msg_;
+};
+
 // Moves dex data to an anonymous, read-only mmap'd region.
 std::unique_ptr<art::MemMap> Redefiner::MoveDataToMemMap(const std::string& original_location,
                                                          jint data_len,
@@ -76,6 +215,8 @@
   return map;
 }
 
+// TODO This should handle doing multiple classes at once so that less cleanup is needed when
+// things go wrong.
 jvmtiError Redefiner::RedefineClass(ArtJvmTiEnv* env,
                                     art::Runtime* runtime,
                                     art::Thread* self,
@@ -116,6 +257,9 @@
     *error_msg = os.str();
     return ERR(INVALID_CLASS_FORMAT);
   }
+  // Stop the JIT for the duration of this redefinition since it might concurrently compile a
+  // method we are going to redefine.
+  art::jit::ScopedJitSuspend suspend_jit;
   // Get shared mutator lock.
   art::ScopedObjectAccess soa(self);
   art::StackHandleScope<1> hs(self);
@@ -296,6 +440,107 @@
   return true;
 }
 
+struct CallbackCtx {
+  Redefiner* const r;
+  art::LinearAlloc* allocator;
+  std::unordered_map<art::ArtMethod*, art::ArtMethod*> obsolete_map;
+  std::unordered_set<art::ArtMethod*> obsolete_methods;
+  bool success;
+  std::string* error_msg;
+
+  CallbackCtx(Redefiner* self, art::LinearAlloc* alloc, std::string* error)
+      : r(self), allocator(alloc), success(true), error_msg(error) {}
+};
+
+void DoRestoreObsoleteMethodsCallback(art::Thread* t, void* vdata) NO_THREAD_SAFETY_ANALYSIS {
+  CallbackCtx* data = reinterpret_cast<CallbackCtx*>(vdata);
+  ObsoleteMethodStackVisitor::RestoreFrames(t, data->obsolete_map, data->error_msg);
+}
+
+void DoAllocateObsoleteMethodsCallback(art::Thread* t, void* vdata) NO_THREAD_SAFETY_ANALYSIS {
+  CallbackCtx* data = reinterpret_cast<CallbackCtx*>(vdata);
+  if (data->success) {
+    // Don't do anything if we already failed once.
+    data->success = ObsoleteMethodStackVisitor::UpdateObsoleteFrames(t,
+                                                                     data->allocator,
+                                                                     data->obsolete_methods,
+                                                                     &data->obsolete_map,
+                                                                     data->error_msg);
+  }
+}
+
+// This creates any ArtMethod* structures needed for obsolete methods and ensures that the stack is
+// updated so they will be run.
+bool Redefiner::FindAndAllocateObsoleteMethods(art::mirror::Class* art_klass) {
+  art::ScopedAssertNoThreadSuspension ns("No thread suspension during thread stack walking");
+  art::mirror::ClassExt* ext = art_klass->GetExtData();
+  CHECK(ext->GetObsoleteMethods() != nullptr);
+  CallbackCtx ctx(this, art_klass->GetClassLoader()->GetAllocator(), error_msg_);
+  // Add all the class's declared methods to the set of potentially obsolete methods.
+  for (auto& m : art_klass->GetDeclaredMethods(art::kRuntimePointerSize)) {
+    ctx.obsolete_methods.insert(&m);
+  }
+  for (art::ArtMethod* old_method : ctx.obsolete_methods) {
+    if (old_method->IsIntrinsic()) {
+      *error_msg_ = StringPrintf("Method '%s' is intrinsic and cannot be made obsolete!",
+                                 old_method->PrettyMethod().c_str());
+      return false;
+    }
+  }
+  {
+    art::MutexLock mu(self_, *art::Locks::thread_list_lock_);
+    art::ThreadList* list = art::Runtime::Current()->GetThreadList();
+    list->ForEach(DoAllocateObsoleteMethodsCallback, static_cast<void*>(&ctx));
+    if (!ctx.success) {
+      list->ForEach(DoRestoreObsoleteMethodsCallback, static_cast<void*>(&ctx));
+      return false;
+    }
+  }
+  FillObsoleteMethodMap(art_klass, ctx.obsolete_map);
+  return true;
+}
+
+// Fills the obsolete method map in the art_klass's extData. This is so obsolete methods are able to
+// figure out their DexCaches.
+void Redefiner::FillObsoleteMethodMap(
+    art::mirror::Class* art_klass,
+    const std::unordered_map<art::ArtMethod*, art::ArtMethod*>& obsoletes) {
+  int32_t index = 0;
+  art::mirror::ClassExt* ext_data = art_klass->GetExtData();
+  art::mirror::PointerArray* obsolete_methods = ext_data->GetObsoleteMethods();
+  art::mirror::ObjectArray<art::mirror::DexCache>* obsolete_dex_caches =
+      ext_data->GetObsoleteDexCaches();
+  int32_t num_method_slots = obsolete_methods->GetLength();
+  // Find the first empty index.
+  for (; index < num_method_slots; index++) {
+    if (obsolete_methods->GetElementPtrSize<art::ArtMethod*>(
+          index, art::kRuntimePointerSize) == nullptr) {
+      break;
+    }
+  }
+  // Make sure we have enough space.
+  CHECK_GT(num_method_slots, static_cast<int32_t>(obsoletes.size() + index));
+  CHECK(obsolete_dex_caches->Get(index) == nullptr);
+  // Fill in the map.
+  for (auto& obs : obsoletes) {
+    obsolete_methods->SetElementPtrSize(index, obs.second, art::kRuntimePointerSize);
+    obsolete_dex_caches->Set(index, art_klass->GetDexCache());
+    index++;
+  }
+}
+
+// TODO It should be possible to only deoptimize the specific obsolete methods.
+// TODO ReJitEverything can (sort of) fail. In certain cases it will skip deoptimizing some frames.
+// If one of these frames is an obsolete method we have a problem. b/33616143
+// TODO This shouldn't be necessary once we can ensure that the current method is not kept in
+// registers across suspend points.
+// TODO Pending b/33630159
+void Redefiner::EnsureObsoleteMethodsAreDeoptimized() {
+  art::ScopedAssertNoThreadSuspension nts("Deoptimizing everything!");
+  art::instrumentation::Instrumentation* i = runtime_->GetInstrumentation();
+  i->ReJitEverything("libOpenJdkJvmti - Class Redefinition");
+}
+
 jvmtiError Redefiner::Run() {
   art::StackHandleScope<5> hs(self_);
   // TODO We might want to have a global lock (or one based on the class being redefined at least)
@@ -338,6 +583,11 @@
   self_->TransitionFromRunnableToSuspended(art::ThreadState::kNative);
   runtime_->GetThreadList()->SuspendAll(
       "Final installation of redefined Class!", /*long_suspend*/true);
+  // TODO We need to invalidate all breakpoints in the redefined class with the debugger.
+  // TODO We need to deal with any instrumentation/debugger deoptimized_methods_.
+  // TODO We need to update all debugger MethodIDs so they note the method they point to is
+  // obsolete or implement some other well defined semantics.
+  // TODO We need to decide on & implement semantics for JNI jmethodids when we redefine methods.
   // TODO Might want to move this into a different type.
   // Now we reach the part where we must do active cleanup if something fails.
   // TODO We should really Retry if this fails instead of simply aborting.
@@ -345,7 +595,8 @@
   art::ObjPtr<art::mirror::LongArray> original_dex_file_cookie(nullptr);
   if (!UpdateJavaDexFile(java_dex_file.Get(),
                          new_dex_file_cookie.Get(),
-                         &original_dex_file_cookie)) {
+                         &original_dex_file_cookie) ||
+      !FindAndAllocateObsoleteMethods(art_class.Get())) {
     // Release suspendAll
     runtime_->GetThreadList()->ResumeAll();
     // Get back shared mutator lock as expected for return.
@@ -361,23 +612,25 @@
     self_->TransitionFromSuspendedToRunnable();
     return result_;
   }
-  // Update the ClassObjects Keep the old DexCache (and other stuff) around so we can restore
-  // functions/fields.
-  // Verify the new Class.
-  //   Failure then undo updates to class
-  // Do stack walks and allocate obsolete methods
-  // Shrink the obsolete method maps if possible?
-  // TODO find appropriate class loader. Allocate new dex files array. Pause all java treads.
-  // Replace dex files array. Do stack scan + allocate obsoletes. Remove array if possible.
-  // TODO We might want to ensure that all threads are stopped for this!
-  // AddDexToClassPath();
-  // TODO
-  // Release suspendAll
+  // Ensure that obsolete methods are deoptimized. This is needed since optimized methods may have
+  // pointers to their ArtMethods stashed in registers that they then use to attempt to hit the
+  // DexCache.
+  // TODO This can fail (leave some methods optimized) near runtime methods (including
+  // quick-to-interpreter transition function).
+  // TODO We probably don't need this at all once we have a way to ensure that the
+  // current_art_method is never stashed in a (physical) register by the JIT and lost to the
+  // stack-walker.
+  EnsureObsoleteMethodsAreDeoptimized();
+  // TODO Verify the new Class.
+  // TODO   Failure then undo updates to class
+  // TODO Shrink the obsolete method maps if possible?
+  // TODO find appropriate class loader.
   // TODO Put this into a scoped thing.
   runtime_->GetThreadList()->ResumeAll();
   // Get back shared mutator lock as expected for return.
   self_->TransitionFromSuspendedToRunnable();
-  // TODO Do this at a more reasonable place.
+  // TODO Do the dex_file_ release at a more reasonable place. This works but it muddles who really
+  // owns the DexFile.
   dex_file_.release();
   return OK;
 }
@@ -420,19 +673,24 @@
     }
     const art::DexFile::ProtoId* proto_id = dex_file_->FindProtoId(method_return_idx,
                                                                    new_type_list);
-    CHECK(proto_id != nullptr || old_type_list == nullptr);
     // TODO Return false, cleanup.
+    CHECK(proto_id != nullptr || old_type_list == nullptr);
     const art::DexFile::MethodId* method_id = dex_file_->FindMethodId(declaring_class_id,
                                                                       *new_name_id,
                                                                       *proto_id);
-    CHECK(method_id != nullptr);
     // TODO Return false, cleanup.
+    CHECK(method_id != nullptr);
     uint32_t dex_method_idx = dex_file_->GetIndexForMethodId(*method_id);
     method.SetDexMethodIndex(dex_method_idx);
     linker->SetEntryPointsToInterpreter(&method);
     method.SetCodeItemOffset(dex_file_->FindCodeItemOffset(class_def, dex_method_idx));
     method.SetDexCacheResolvedMethods(new_dex_cache->GetResolvedMethods(), image_pointer_size);
     method.SetDexCacheResolvedTypes(new_dex_cache->GetResolvedTypes(), image_pointer_size);
+    // Notify the jit that this method is redefined.
+    art::jit::Jit* jit = runtime_->GetJit();
+    if (jit != nullptr) {
+      jit->GetCodeCache()->NotifyMethodRedefined(&method);
+    }
   }
   return true;
 }
diff --git a/runtime/openjdkjvmti/ti_redefine.h b/runtime/openjdkjvmti/ti_redefine.h
index 73cfc2b..9d23ce4 100644
--- a/runtime/openjdkjvmti/ti_redefine.h
+++ b/runtime/openjdkjvmti/ti_redefine.h
@@ -64,6 +64,8 @@
 namespace openjdkjvmti {
 
 // Class that can redefine a single class's methods.
+// TODO We should really make this driven by an outside class so we can do multiple classes at the
+// same time and have less required cleanup.
 class Redefiner {
  public:
   // Redefine the given class with the given dex data. Note this function does not take ownership of
@@ -124,6 +126,14 @@
   // in the future. For now we will just take the memory hit.
   bool EnsureClassAllocationsFinished() REQUIRES_SHARED(art::Locks::mutator_lock_);
 
+  // Ensure that obsolete methods are deoptimized. This is needed since optimized methods may have
+  // pointers to their ArtMethods stashed in registers that they then use to attempt to hit the
+  // DexCache.
+  void EnsureObsoleteMethodsAreDeoptimized()
+      REQUIRES(art::Locks::mutator_lock_)
+      REQUIRES(!art::Locks::thread_list_lock_,
+               !art::Locks::classlinker_classes_lock_);
+
   art::mirror::ClassLoader* GetClassLoader() REQUIRES_SHARED(art::Locks::mutator_lock_);
 
   // This finds the java.lang.DexFile we will add the native DexFile to as part of the classpath.
@@ -170,6 +180,13 @@
   bool UpdateClass(art::ObjPtr<art::mirror::Class> mclass,
                    art::ObjPtr<art::mirror::DexCache> new_dex_cache)
       REQUIRES(art::Locks::mutator_lock_);
+
+  bool FindAndAllocateObsoleteMethods(art::mirror::Class* art_klass)
+      REQUIRES(art::Locks::mutator_lock_);
+
+  void FillObsoleteMethodMap(art::mirror::Class* art_klass,
+                             const std::unordered_map<art::ArtMethod*, art::ArtMethod*>& obsoletes)
+      REQUIRES(art::Locks::mutator_lock_);
 };
 
 }  // namespace openjdkjvmti
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 3fed7c9..f9efc0b 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -618,6 +618,17 @@
   return result;
 }
 
+void StackVisitor::SetMethod(ArtMethod* method) {
+  DCHECK(GetMethod() != nullptr);
+  if (cur_shadow_frame_ != nullptr) {
+    cur_shadow_frame_->SetMethod(method);
+  } else {
+    DCHECK(cur_quick_frame_ != nullptr);
+    CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod!";
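+    // cur_quick_frame_ points directly at the ArtMethod* slot in the quick frame, so writing
+    // through it replaces the frame's method.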
+    *cur_quick_frame_ = method;
+  }
+}
+
 static void AssertPcIsWithinQuickCode(ArtMethod* method, uintptr_t pc)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   if (method->IsNative() || method->IsRuntimeMethod() || method->IsProxyMethod()) {
diff --git a/runtime/stack.h b/runtime/stack.h
index b1e99e5..9dceb29 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -327,6 +327,12 @@
     }
   }
 
+  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_) {
+    DCHECK(method != nullptr);
+    DCHECK(method_ != nullptr);
+    method_ = method;
+  }
+
   ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(method_ != nullptr);
     return method_;
@@ -610,6 +616,10 @@
 
   ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Sets this stack frame's method pointer. This requires the mutator lock to be held
+  // exclusively. This does not work with inlined frames.
+  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);
+
   ArtMethod* GetOuterMethod() const {
     return *GetCurrentQuickFrame();
   }