Pulled instrumentation out into its own class, separate from tracing.

This is in preparation for Ian's debugger changes. The instrumentation
still needs support for debugging, which I'll add later.

Change-Id: I29ce0af1ff2eaec8f6d2f54963263381363ebf72
diff --git a/src/thread.cc b/src/thread.cc
index 00aaabd..0d06e42 100644
--- a/src/thread.cc
+++ b/src/thread.cc
@@ -808,7 +808,7 @@
 struct StackDumpVisitor : public StackVisitor {
   StackDumpVisitor(std::ostream& os, const Thread* thread, Context* context, bool can_allocate)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
-      : StackVisitor(thread->GetManagedStack(), thread->GetTraceStack(), context),
+      : StackVisitor(thread->GetManagedStack(), thread->GetInstrumentationStack(), context),
         os(os), thread(thread), can_allocate(can_allocate),
         last_method(NULL), last_line_number(0), repetition_count(0), frame_count(0) {
   }
@@ -953,7 +953,7 @@
       throwing_OutOfMemoryError_(false),
       debug_suspend_count_(0),
       debug_invoke_req_(new DebugInvokeReq),
-      trace_stack_(new std::vector<TraceStackFrame>),
+      instrumentation_stack_(new std::vector<InstrumentationStackFrame>),
       name_(new std::string(kThreadNameDuringStartup)),
       daemon_(daemon),
       pthread_self_(0),
@@ -1055,7 +1055,7 @@
 #endif
 
   delete debug_invoke_req_;
-  delete trace_stack_;
+  delete instrumentation_stack_;
   delete name_;
 
   TearDownAlternateSignalStack();
@@ -1229,9 +1229,9 @@
 class CountStackDepthVisitor : public StackVisitor {
  public:
   CountStackDepthVisitor(const ManagedStack* stack,
-                         const std::vector<TraceStackFrame>* trace_stack)
+                         const std::vector<InstrumentationStackFrame>* instrumentation_stack)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
-      : StackVisitor(stack, trace_stack, NULL),
+      : StackVisitor(stack, instrumentation_stack, NULL),
         depth_(0), skip_depth_(0), skipping_(true) {}
 
   bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -1270,9 +1270,9 @@
 class BuildInternalStackTraceVisitor : public StackVisitor {
  public:
   explicit BuildInternalStackTraceVisitor(Thread* self, const ManagedStack* stack,
-                                          const std::vector<TraceStackFrame>* trace_stack,
+                                          const std::vector<InstrumentationStackFrame>* instrumentation_stack,
                                           int skip_depth)
-      : StackVisitor(stack, trace_stack, NULL), self_(self),
+      : StackVisitor(stack, instrumentation_stack, NULL), self_(self),
         skip_depth_(skip_depth), count_(0), dex_pc_trace_(NULL), method_trace_(NULL) {}
 
   bool Init(int depth)
@@ -1343,13 +1343,13 @@
 
 jobject Thread::CreateInternalStackTrace(const ScopedObjectAccessUnchecked& soa) const {
   // Compute depth of stack
-  CountStackDepthVisitor count_visitor(GetManagedStack(), GetTraceStack());
+  CountStackDepthVisitor count_visitor(GetManagedStack(), GetInstrumentationStack());
   count_visitor.WalkStack();
   int32_t depth = count_visitor.GetDepth();
   int32_t skip_depth = count_visitor.GetSkipDepth();
 
   // Build internal stack trace.
-  BuildInternalStackTraceVisitor build_trace_visitor(soa.Self(), GetManagedStack(), GetTraceStack(),
+  BuildInternalStackTraceVisitor build_trace_visitor(soa.Self(), GetManagedStack(), GetInstrumentationStack(),
                                                      skip_depth);
   if (!build_trace_visitor.Init(depth)) {
     return NULL;  // Allocation failed.
@@ -1667,7 +1667,7 @@
  public:
   CatchBlockStackVisitor(Thread* self, Throwable* exception)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
-      : StackVisitor(self->GetManagedStack(), self->GetTraceStack(), self->GetLongJumpContext()),
+      : StackVisitor(self->GetManagedStack(), self->GetInstrumentationStack(), self->GetLongJumpContext()),
         self_(self), exception_(exception), to_find_(exception->GetClass()), throw_method_(NULL),
         throw_frame_id_(0), throw_dex_pc_(0), handler_quick_frame_(NULL),
         handler_quick_frame_pc_(0), handler_dex_pc_(0), native_method_count_(0),
@@ -1702,9 +1702,9 @@
       if (method->IsNative()) {
         native_method_count_++;
       } else {
-        // Unwind stack when an exception occurs during method tracing
-        if (UNLIKELY(method_tracing_active_ && IsTraceExitPc(GetCurrentQuickFramePc()))) {
-          uintptr_t pc = TraceMethodUnwindFromCode(Thread::Current());
+        // Unwind stack when an exception occurs during instrumentation
+        if (UNLIKELY(method_tracing_active_ && IsInstrumentationExitPc(GetCurrentQuickFramePc()))) {
+          uintptr_t pc = InstrumentationMethodUnwindFromCode(Thread::Current());
           dex_pc = method->ToDexPc(pc);
         } else {
           dex_pc = GetDexPc();
@@ -1800,9 +1800,9 @@
 AbstractMethod* Thread::GetCurrentMethod(uint32_t* dex_pc, size_t* frame_id) const {
   struct CurrentMethodVisitor : public StackVisitor {
     CurrentMethodVisitor(const ManagedStack* stack,
-                         const std::vector<TraceStackFrame>* trace_stack)
+                         const std::vector<InstrumentationStackFrame>* instrumentation_stack)
         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
-        : StackVisitor(stack, trace_stack, NULL), method_(NULL), dex_pc_(0), frame_id_(0) {}
+        : StackVisitor(stack, instrumentation_stack, NULL), method_(NULL), dex_pc_(0), frame_id_(0) {}
 
     virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
       AbstractMethod* m = GetMethod();
@@ -1820,7 +1820,7 @@
     size_t frame_id_;
   };
 
-  CurrentMethodVisitor visitor(GetManagedStack(), GetTraceStack());
+  CurrentMethodVisitor visitor(GetManagedStack(), GetInstrumentationStack());
   visitor.WalkStack(false);
   if (dex_pc != NULL) {
     *dex_pc = visitor.dex_pc_;
@@ -1842,10 +1842,10 @@
 template <typename Visitor>
 class ReferenceMapVisitor : public StackVisitor {
  public:
-  ReferenceMapVisitor(const ManagedStack* stack, const std::vector<TraceStackFrame>* trace_stack,
+  ReferenceMapVisitor(const ManagedStack* stack, const std::vector<InstrumentationStackFrame>* instrumentation_stack,
                       Context* context, const Visitor& visitor)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
-      : StackVisitor(stack, trace_stack, context), visitor_(visitor) {}
+      : StackVisitor(stack, instrumentation_stack, context), visitor_(visitor) {}
 
   bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     if (false) {
@@ -2006,7 +2006,7 @@
   // Visit roots on this thread's stack
   Context* context = GetLongJumpContext();
   VerifyCallbackVisitor visitorToCallback(visitor, arg);
-  ReferenceMapVisitor<VerifyCallbackVisitor> mapper(GetManagedStack(), GetTraceStack(), context,
+  ReferenceMapVisitor<VerifyCallbackVisitor> mapper(GetManagedStack(), GetInstrumentationStack(), context,
                                                   visitorToCallback);
   mapper.WalkStack();
   ReleaseLongJumpContext(context);
@@ -2027,7 +2027,7 @@
   // Visit roots on this thread's stack
   Context* context = GetLongJumpContext();
   RootCallbackVisitor visitorToCallback(visitor, arg);
-  ReferenceMapVisitor<RootCallbackVisitor> mapper(GetManagedStack(), GetTraceStack(), context,
+  ReferenceMapVisitor<RootCallbackVisitor> mapper(GetManagedStack(), GetInstrumentationStack(), context,
                                                   visitorToCallback);
   mapper.WalkStack();
   ReleaseLongJumpContext(context);
@@ -2042,7 +2042,7 @@
 void Thread::VerifyStack() {
   UniquePtr<Context> context(Context::Create());
   RootCallbackVisitor visitorToCallback(VerifyObject, Runtime::Current()->GetHeap());
-  ReferenceMapVisitor<RootCallbackVisitor> mapper(GetManagedStack(), GetTraceStack(), context.get(),
+  ReferenceMapVisitor<RootCallbackVisitor> mapper(GetManagedStack(), GetInstrumentationStack(), context.get(),
                                                   visitorToCallback);
   mapper.WalkStack();
 }