Remove ArtCode.

- Instead use OatQuickMethodHeader.
- Various cleanups now that the ArtMethod -> ArtCode ->
  OatQuickMethodHeader indirection is gone (see the sketch below).
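
A sketch of the pattern this change applies throughout the tree
(illustrative only; these accessors are the ones visible in the test
diff below, the surrounding context is elided):

  Before: every query went through a per-call ArtCode value object.
    if (!GetCurrentCode().IsOptimized(sizeof(void*))) { ... }
    uintptr_t pc = GetCurrentCode().ToNativeQuickPc(
        dex_pc, /* is_catch_handler */ false, abort_if_not_found);

  After: callers fetch the OatQuickMethodHeader once and query it directly.
    const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
    if (!header->IsOptimized()) { ... }
    uintptr_t pc = header->ToNativeQuickPc(
        GetMethod(), dex_pc, /* is_catch_handler */ false, abort_if_not_found);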

As a consequence of this cleanup, exception handling got noticeably
faster: roughly a 22% reduction in runtime on the benchmark below.

ParserCombinators benchmark (exception intensive) on x64, lower is better:
Before:
ParserCombinators(RunTime): 1062500.0 us.
After:
ParserCombinators(RunTime): 833000.0 us.

Change-Id: Idac917b6f1b0dc254ad68fb3781cd61bccadb0f3
diff --git a/test/004-ReferenceMap/stack_walk_refmap_jni.cc b/test/004-ReferenceMap/stack_walk_refmap_jni.cc
index f8d321c..34fb3f8 100644
--- a/test/004-ReferenceMap/stack_walk_refmap_jni.cc
+++ b/test/004-ReferenceMap/stack_walk_refmap_jni.cc
@@ -19,15 +19,17 @@
 
 namespace art {
 
-#define CHECK_REGS_CONTAIN_REFS(dex_pc, abort_if_not_found, ...) do { \
-  int t[] = {__VA_ARGS__}; \
-  int t_size = sizeof(t) / sizeof(*t); \
-  uintptr_t native_quick_pc = GetCurrentCode().ToNativeQuickPc(dex_pc, \
-                                                 /* is_catch_handler */ false, \
-                                                 abort_if_not_found); \
-  if (native_quick_pc != UINTPTR_MAX) { \
-    CheckReferences(t, t_size, GetCurrentCode().NativeQuickPcOffset(native_quick_pc)); \
-  } \
+#define CHECK_REGS_CONTAIN_REFS(dex_pc, abort_if_not_found, ...) do {                 \
+  int t[] = {__VA_ARGS__};                                                            \
+  int t_size = sizeof(t) / sizeof(*t);                                                \
+  const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();       \
+  uintptr_t native_quick_pc = method_header->ToNativeQuickPc(GetMethod(),             \
+                                                 dex_pc,                              \
+                                                 /* is_catch_handler */ false,        \
+                                                 abort_if_not_found);                 \
+  if (native_quick_pc != UINTPTR_MAX) {                                               \
+    CheckReferences(t, t_size, method_header->NativeQuickPcOffset(native_quick_pc));  \
+  }                                                                                   \
 } while (false);
 
 struct ReferenceMap2Visitor : public CheckReferenceMapVisitor {
@@ -49,7 +51,7 @@
       CHECK_REGS_CONTAIN_REFS(0x06U, true, 8, 1);  // v8: this, v1: x
       CHECK_REGS_CONTAIN_REFS(0x08U, true, 8, 3, 1);  // v8: this, v3: y, v1: x
       CHECK_REGS_CONTAIN_REFS(0x0cU, true, 8, 3, 1);  // v8: this, v3: y, v1: x
-      if (!GetCurrentCode().IsOptimized(sizeof(void*))) {
+      if (!GetCurrentOatQuickMethodHeader()->IsOptimized()) {
         CHECK_REGS_CONTAIN_REFS(0x0eU, true, 8, 3, 1);  // v8: this, v3: y, v1: x
       }
       CHECK_REGS_CONTAIN_REFS(0x10U, true, 8, 3, 1);  // v8: this, v3: y, v1: x
@@ -65,7 +67,7 @@
       // Note that v0: ex can be eliminated because it's a dead merge of two different exceptions.
       CHECK_REGS_CONTAIN_REFS(0x18U, true, 8, 2, 1);  // v8: this, v2: y, v1: x (dead v0: ex)
       CHECK_REGS_CONTAIN_REFS(0x1aU, true, 8, 5, 2, 1);  // v8: this, v5: x[1], v2: y, v1: x (dead v0: ex)
-      if (!GetCurrentCode().IsOptimized(sizeof(void*))) {
+      if (!GetCurrentOatQuickMethodHeader()->IsOptimized()) {
         // v8: this, v5: x[1], v2: y, v1: x (dead v0: ex)
         CHECK_REGS_CONTAIN_REFS(0x1dU, true, 8, 5, 2, 1);
         // v5 is removed from the root set because there is a "merge" operation.
@@ -74,7 +76,7 @@
       }
       CHECK_REGS_CONTAIN_REFS(0x21U, true, 8, 2, 1);  // v8: this, v2: y, v1: x (dead v0: ex)
 
-      if (!GetCurrentCode().IsOptimized(sizeof(void*))) {
+      if (!GetCurrentOatQuickMethodHeader()->IsOptimized()) {
         CHECK_REGS_CONTAIN_REFS(0x27U, true, 8, 4, 2, 1);  // v8: this, v4: ex, v2: y, v1: x
       }
       CHECK_REGS_CONTAIN_REFS(0x29U, true, 8, 4, 2, 1);  // v8: this, v4: ex, v2: y, v1: x