Merge "Enable multi-threaded Quick compilation" into dalvik-dev
diff --git a/src/class_linker.cc b/src/class_linker.cc
index 1853a59..83661cb 100644
--- a/src/class_linker.cc
+++ b/src/class_linker.cc
@@ -166,7 +166,6 @@
   "Ljava/lang/Object;",
   "[Ljava/lang/Class;",
   "[Ljava/lang/Object;",
-  "[[Ljava/lang/Object;",
   "Ljava/lang/String;",
   "Ljava/lang/DexCache;",
   "Ljava/lang/ref/Reference;",
@@ -258,10 +257,6 @@
   SirtRef<Class> object_array_class(self, AllocClass(self, java_lang_Class.get(), sizeof(Class)));
   object_array_class->SetComponentType(java_lang_Object.get());
 
-  // Object[][] needed for iftables.
-  SirtRef<Class> object_array_array_class(self, AllocClass(self, java_lang_Class.get(), sizeof(Class)));
-  object_array_array_class->SetComponentType(object_array_class.get());
-
   // Setup the char class to be used for char[].
   SirtRef<Class> char_class(self, AllocClass(self, java_lang_Class.get(), sizeof(Class)));
 
@@ -283,7 +278,6 @@
   SetClassRoot(kJavaLangObject, java_lang_Object.get());
   SetClassRoot(kClassArrayClass, class_array_class.get());
   SetClassRoot(kObjectArrayClass, object_array_class.get());
-  SetClassRoot(kObjectArrayArrayClass, object_array_array_class.get());
   SetClassRoot(kCharArrayClass, char_array_class.get());
   SetClassRoot(kJavaLangString, java_lang_String.get());
 
@@ -308,7 +302,7 @@
 
   // now that these are registered, we can use AllocClass() and AllocObjectArray
 
-  // Setup DexCache. This can not be done later since AppendToBootClassPath calls AllocDexCache.
+  // Set up DexCache. This cannot be done later since AppendToBootClassPath calls AllocDexCache.
   SirtRef<Class>
       java_lang_DexCache(self, AllocClass(self, java_lang_Class.get(), sizeof(DexCacheClass)));
   SetClassRoot(kJavaLangDexCache, java_lang_DexCache.get());
@@ -428,9 +422,6 @@
   Class* found_object_array_class = FindSystemClass("[Ljava/lang/Object;");
   CHECK_EQ(object_array_class.get(), found_object_array_class);
 
-  Class* found_object_array_array_class = FindSystemClass("[[Ljava/lang/Object;");
-  CHECK_EQ(object_array_array_class.get(), found_object_array_array_class);
-
   // Setup the single, global copy of "iftable".
   Class* java_lang_Cloneable = FindSystemClass("Ljava/lang/Cloneable;");
   CHECK(java_lang_Cloneable != NULL);
@@ -438,8 +429,8 @@
   CHECK(java_io_Serializable != NULL);
   // We assume that Cloneable/Serializable don't have superinterfaces -- normally we'd have to
   // crawl up and explicitly list all of the supers as well.
-  array_iftable_->Set(0, AllocInterfaceEntry(self, java_lang_Cloneable));
-  array_iftable_->Set(1, AllocInterfaceEntry(self, java_io_Serializable));
+  array_iftable_->SetInterface(0, java_lang_Cloneable);
+  array_iftable_->SetInterface(1, java_io_Serializable);
 
   // Sanity check Class[] and Object[]'s interfaces.
   ClassHelper kh(class_array_class.get(), this);
@@ -1174,14 +1165,6 @@
   return dex_cache.get();
 }
 
-InterfaceEntry* ClassLinker::AllocInterfaceEntry(Thread* self, Class* interface) {
-  DCHECK(interface->IsInterface());
-  SirtRef<ObjectArray<Object> > array(self, AllocObjectArray<Object>(self, InterfaceEntry::LengthAsArray()));
-  SirtRef<InterfaceEntry> interface_entry(self, down_cast<InterfaceEntry*>(array.get()));
-  interface_entry->SetInterface(interface);
-  return interface_entry.get();
-}
-
 Class* ClassLinker::AllocClass(Thread* self, Class* java_lang_Class, size_t class_size) {
   DCHECK_GE(class_size, sizeof(Class));
   Heap* heap = Runtime::Current()->GetHeap();
@@ -1885,8 +1868,6 @@
       new_class.reset(GetClassRoot(kClassArrayClass));
     } else if (descriptor == "[Ljava/lang/Object;") {
       new_class.reset(GetClassRoot(kObjectArrayClass));
-    } else if (descriptor == "[[Ljava/lang/Object;") {
-      new_class.reset(GetClassRoot(kObjectArrayArrayClass));
     } else if (descriptor == class_roots_descriptors_[kJavaLangStringArrayClass]) {
       new_class.reset(GetClassRoot(kJavaLangStringArrayClass));
     } else if (descriptor == class_roots_descriptors_[kJavaLangReflectAbstractMethodArrayClass]) {
@@ -2683,12 +2664,12 @@
       }
     }
   }
+  IfTable* iftable = klass->GetIfTable();
   for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
-    InterfaceEntry* interface_entry = klass->GetIfTable()->Get(i);
-    Class* interface = interface_entry->GetInterface();
+    Class* interface = iftable->GetInterface(i);
     if (klass->GetClassLoader() != interface->GetClassLoader()) {
       for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
-        const AbstractMethod* method = interface_entry->GetMethodArray()->Get(j);
+        const AbstractMethod* method = iftable->GetMethodArray(i)->Get(j);
         if (!IsSameMethodSignatureInDifferentClassContexts(method, interface,
                                                            method->GetDeclaringClass())) {
           ThrowLinkageError("Class %s method %s resolves differently in interface %s",
@@ -2784,7 +2765,7 @@
 }
 
 bool ClassLinker::EnsureInitialized(Class* c, bool can_run_clinit, bool can_init_fields) {
-  CHECK(c != NULL);
+  DCHECK(c != NULL);
   if (c->IsInitialized()) {
     return true;
   }
@@ -3074,18 +3055,34 @@
     ifcount += interface->GetIfTableCount();
   }
   if (ifcount == 0) {
-    // TODO: enable these asserts with klass status validation
-    // DCHECK_EQ(klass->GetIfTableCount(), 0);
-    // DCHECK(klass->GetIfTable() == NULL);
+    // Class implements no interfaces.
+    DCHECK_EQ(klass->GetIfTableCount(), 0);
+    DCHECK(klass->GetIfTable() == NULL);
     return true;
   }
+  if (ifcount == super_ifcount) {
+    // Class implements same interfaces as parent, are any of these not marker interfaces?
+    bool has_non_marker_interface = false;
+    IfTable* super_iftable = klass->GetSuperClass()->GetIfTable();
+    for (size_t i = 0; i < ifcount; ++i) {
+      if (super_iftable->GetMethodArrayCount(i) > 0) {
+        has_non_marker_interface = true;
+        break;
+      }
+    }
+    if (!has_non_marker_interface) {
+      // Class just inherits marker interfaces from parent so recycle parent's iftable.
+      klass->SetIfTable(super_iftable);
+      return true;
+    }
+  }
   Thread* self = Thread::Current();
-  SirtRef<ObjectArray<InterfaceEntry> > iftable(self, AllocIfTable(self, ifcount));
+  SirtRef<IfTable> iftable(self, AllocIfTable(self, ifcount));
   if (super_ifcount != 0) {
-    ObjectArray<InterfaceEntry>* super_iftable = klass->GetSuperClass()->GetIfTable();
+    IfTable* super_iftable = klass->GetSuperClass()->GetIfTable();
     for (size_t i = 0; i < super_ifcount; i++) {
-      Class* super_interface = super_iftable->Get(i)->GetInterface();
-      iftable->Set(i, AllocInterfaceEntry(self, super_interface));
+      Class* super_interface = super_iftable->GetInterface(i);
+      iftable->SetInterface(i, super_interface);
     }
   }
   // Flatten the interface inheritance hierarchy.
@@ -3104,7 +3101,7 @@
     // Check if interface is already in iftable
     bool duplicate = false;
     for (size_t j = 0; j < idx; j++) {
-      Class* existing_interface = iftable->Get(j)->GetInterface();
+      Class* existing_interface = iftable->GetInterface(j);
       if (existing_interface == interface) {
         duplicate = true;
         break;
@@ -3112,27 +3109,27 @@
     }
     if (!duplicate) {
       // Add this non-duplicate interface.
-      iftable->Set(idx++, AllocInterfaceEntry(self, interface));
+      iftable->SetInterface(idx++, interface);
       // Add this interface's non-duplicate super-interfaces.
       for (int32_t j = 0; j < interface->GetIfTableCount(); j++) {
-        Class* super_interface = interface->GetIfTable()->Get(j)->GetInterface();
+        Class* super_interface = interface->GetIfTable()->GetInterface(j);
         bool super_duplicate = false;
         for (size_t k = 0; k < idx; k++) {
-          Class* existing_interface = iftable->Get(k)->GetInterface();
+          Class* existing_interface = iftable->GetInterface(k);
           if (existing_interface == super_interface) {
             super_duplicate = true;
             break;
           }
         }
         if (!super_duplicate) {
-          iftable->Set(idx++, AllocInterfaceEntry(self, super_interface));
+          iftable->SetInterface(idx++, super_interface);
         }
       }
     }
   }
   // Shrink iftable in case duplicates were found
   if (idx < ifcount) {
-    iftable.reset(iftable->CopyOf(self, idx));
+    iftable.reset(down_cast<IfTable*>(iftable->CopyOf(self, idx * IfTable::kMax)));
     ifcount = idx;
   } else {
     CHECK_EQ(idx, ifcount);
@@ -3140,60 +3137,62 @@
   klass->SetIfTable(iftable.get());
 
   // If we're an interface, we don't need the vtable pointers, so we're done.
-  if (klass->IsInterface() /*|| super_ifcount == ifcount*/) {
+  if (klass->IsInterface()) {
     return true;
   }
   std::vector<AbstractMethod*> miranda_list;
   MethodHelper vtable_mh(NULL, this);
   MethodHelper interface_mh(NULL, this);
   for (size_t i = 0; i < ifcount; ++i) {
-    InterfaceEntry* interface_entry = iftable->Get(i);
-    Class* interface = interface_entry->GetInterface();
-    ObjectArray<AbstractMethod>* method_array =
-        AllocMethodArray(self, interface->NumVirtualMethods());
-    interface_entry->SetMethodArray(method_array);
-    ObjectArray<AbstractMethod>* vtable = klass->GetVTableDuringLinking();
-    for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
-      AbstractMethod* interface_method = interface->GetVirtualMethod(j);
-      interface_mh.ChangeMethod(interface_method);
-      int32_t k;
-      // For each method listed in the interface's method list, find the
-      // matching method in our class's method list.  We want to favor the
-      // subclass over the superclass, which just requires walking
-      // back from the end of the vtable.  (This only matters if the
-      // superclass defines a private method and this class redefines
-      // it -- otherwise it would use the same vtable slot.  In .dex files
-      // those don't end up in the virtual method table, so it shouldn't
-      // matter which direction we go.  We walk it backward anyway.)
-      for (k = vtable->GetLength() - 1; k >= 0; --k) {
-        AbstractMethod* vtable_method = vtable->Get(k);
-        vtable_mh.ChangeMethod(vtable_method);
-        if (interface_mh.HasSameNameAndSignature(&vtable_mh)) {
-          if (!vtable_method->IsPublic()) {
-            Thread::Current()->ThrowNewExceptionF("Ljava/lang/IllegalAccessError;",
-                "Implementation not public: %s", PrettyMethod(vtable_method).c_str());
-            return false;
-          }
-          method_array->Set(j, vtable_method);
-          break;
-        }
-      }
-      if (k < 0) {
-        SirtRef<AbstractMethod> miranda_method(self, NULL);
-        for (size_t mir = 0; mir < miranda_list.size(); mir++) {
-          AbstractMethod* mir_method = miranda_list[mir];
-          vtable_mh.ChangeMethod(mir_method);
+    Class* interface = iftable->GetInterface(i);
+    size_t num_methods = interface->NumVirtualMethods();
+    if (num_methods > 0) {
+      ObjectArray<AbstractMethod>* method_array = AllocMethodArray(self, num_methods);
+      iftable->SetMethodArray(i, method_array);
+      ObjectArray<AbstractMethod>* vtable = klass->GetVTableDuringLinking();
+      for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
+        AbstractMethod* interface_method = interface->GetVirtualMethod(j);
+        interface_mh.ChangeMethod(interface_method);
+        int32_t k;
+        // For each method listed in the interface's method list, find the
+        // matching method in our class's method list.  We want to favor the
+        // subclass over the superclass, which just requires walking
+        // back from the end of the vtable.  (This only matters if the
+        // superclass defines a private method and this class redefines
+        // it -- otherwise it would use the same vtable slot.  In .dex files
+        // those don't end up in the virtual method table, so it shouldn't
+        // matter which direction we go.  We walk it backward anyway.)
+        for (k = vtable->GetLength() - 1; k >= 0; --k) {
+          AbstractMethod* vtable_method = vtable->Get(k);
+          vtable_mh.ChangeMethod(vtable_method);
           if (interface_mh.HasSameNameAndSignature(&vtable_mh)) {
-            miranda_method.reset(miranda_list[mir]);
+            if (!vtable_method->IsPublic()) {
+              self->ThrowNewExceptionF("Ljava/lang/IllegalAccessError;",
+                                       "Implementation not public: %s",
+                                       PrettyMethod(vtable_method).c_str());
+              return false;
+            }
+            method_array->Set(j, vtable_method);
             break;
           }
         }
-        if (miranda_method.get() == NULL) {
-          // point the interface table at a phantom slot
-          miranda_method.reset(down_cast<AbstractMethod*>(interface_method->Clone(self)));
-          miranda_list.push_back(miranda_method.get());
+        if (k < 0) {
+          SirtRef<AbstractMethod> miranda_method(self, NULL);
+          for (size_t mir = 0; mir < miranda_list.size(); mir++) {
+            AbstractMethod* mir_method = miranda_list[mir];
+            vtable_mh.ChangeMethod(mir_method);
+            if (interface_mh.HasSameNameAndSignature(&vtable_mh)) {
+              miranda_method.reset(miranda_list[mir]);
+              break;
+            }
+          }
+          if (miranda_method.get() == NULL) {
+            // point the interface table at a phantom slot
+            miranda_method.reset(down_cast<AbstractMethod*>(interface_method->Clone(self)));
+            miranda_list.push_back(miranda_method.get());
+          }
+          method_array->Set(j, miranda_method.get());
         }
-        method_array->Set(j, miranda_method.get());
       }
     }
   }
diff --git a/src/class_linker.h b/src/class_linker.h
index fd404c1..096d602 100644
--- a/src/class_linker.h
+++ b/src/class_linker.h
@@ -337,9 +337,9 @@
         GetClassRoot(kJavaLangReflectMethodArrayClass), length);
   }
 
-  ObjectArray<InterfaceEntry>* AllocIfTable(Thread* self, size_t length)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return ObjectArray<InterfaceEntry>::Alloc(self, GetClassRoot(kObjectArrayArrayClass), length);
+  IfTable* AllocIfTable(Thread* self, size_t ifcount) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    return down_cast<IfTable*>(
+        IfTable::Alloc(self, GetClassRoot(kObjectArrayClass), ifcount * IfTable::kMax));
   }
 
   ObjectArray<Field>* AllocFieldArray(Thread* self, size_t length)
@@ -412,9 +412,6 @@
   Method* AllocMethod(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   Constructor* AllocConstructor(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  InterfaceEntry* AllocInterfaceEntry(Thread* self, Class* interface)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
   Class* CreatePrimitiveClass(Thread* self, Primitive::Type type)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     return InitializePrimitiveClass(AllocClass(self, sizeof(Class)), type);
@@ -574,7 +571,6 @@
     kJavaLangObject,
     kClassArrayClass,
     kObjectArrayClass,
-    kObjectArrayArrayClass,
     kJavaLangString,
     kJavaLangDexCache,
     kJavaLangRefReference,
@@ -637,7 +633,7 @@
     return descriptor;
   }
 
-  ObjectArray<InterfaceEntry>* array_iftable_;
+  IfTable* array_iftable_;
 
   bool init_done_;
 
diff --git a/src/class_linker_test.cc b/src/class_linker_test.cc
index 02512eb..1c41f3b 100644
--- a/src/class_linker_test.cc
+++ b/src/class_linker_test.cc
@@ -127,7 +127,7 @@
     EXPECT_EQ(2U, kh.NumDirectInterfaces());
     EXPECT_TRUE(array->GetVTable() != NULL);
     EXPECT_EQ(2, array->GetIfTableCount());
-    ObjectArray<InterfaceEntry>* iftable = array->GetIfTable();
+    IfTable* iftable = array->GetIfTable();
     ASSERT_TRUE(iftable != NULL);
     kh.ChangeClass(kh.GetDirectInterface(0));
     EXPECT_STREQ(kh.GetDescriptor(), "Ljava/lang/Cloneable;");
@@ -202,17 +202,14 @@
       }
     }
     EXPECT_EQ(klass->IsInterface(), klass->GetVTable() == NULL);
+    const IfTable* iftable = klass->GetIfTable();
     for (int i = 0; i < klass->GetIfTableCount(); i++) {
-      const InterfaceEntry* interface_entry = klass->GetIfTable()->Get(i);
-      ASSERT_TRUE(interface_entry != NULL);
-      Class* interface = interface_entry->GetInterface();
+      Class* interface = iftable->GetInterface(i);
       ASSERT_TRUE(interface != NULL);
-      EXPECT_TRUE(interface_entry->GetInterface() != NULL);
       if (klass->IsInterface()) {
-        EXPECT_EQ(0U, interface_entry->GetMethodArrayCount());
+        EXPECT_EQ(0U, iftable->GetMethodArrayCount(i));
       } else {
-        CHECK_EQ(interface->NumVirtualMethods(), interface_entry->GetMethodArrayCount());
-        EXPECT_EQ(interface->NumVirtualMethods(), interface_entry->GetMethodArrayCount());
+        EXPECT_EQ(interface->NumVirtualMethods(), iftable->GetMethodArrayCount(i));
       }
     }
     if (klass->IsAbstract()) {
diff --git a/src/dex_instruction.cc b/src/dex_instruction.cc
index 0de7523..201a8e6 100644
--- a/src/dex_instruction.cc
+++ b/src/dex_instruction.cc
@@ -297,7 +297,8 @@
         case CONST_CLASS:
         case NEW_INSTANCE:
           if (file != NULL) {
-            os << opcode << " " << PrettyType(insn.vB, *file) << " // type@" << insn.vB;
+            os << opcode << " v" << insn.vA << ", " << PrettyType(insn.vB, *file)
+               << " // type@" << insn.vB;
             break;
           }  // else fall-through
         case SGET:
@@ -308,7 +309,8 @@
         case SGET_CHAR:
         case SGET_SHORT:
           if (file != NULL) {
-            os << opcode << " " << PrettyField(insn.vB, *file, true) << " // field@" << insn.vB;
+            os << opcode << " v" << insn.vA << ", " << PrettyField(insn.vB, *file, true)
+               << " // field@" << insn.vB;
             break;
           }  // else fall-through
         case SPUT:
@@ -319,7 +321,8 @@
         case SPUT_CHAR:
         case SPUT_SHORT:
           if (file != NULL) {
-            os << opcode << " " << PrettyField(insn.vB, *file, true) << " // field@" << insn.vB;
+            os << opcode << " v" << insn.vA << ", " << PrettyField(insn.vB, *file, true)
+               << " // field@" << insn.vB;
             break;
           }  // else fall-through
         default:
@@ -342,7 +345,8 @@
         case IGET_CHAR:
         case IGET_SHORT:
           if (file != NULL) {
-            os << PrettyField(insn.vC, *file, true) << " // field@" << insn.vC;
+            os << opcode << " v" << insn.vA << ", v" << insn.vB << ", "
+               << PrettyField(insn.vC, *file, true) << " // field@" << insn.vC;
             break;
           }  // else fall-through
         case IPUT:
@@ -353,17 +357,20 @@
         case IPUT_CHAR:
         case IPUT_SHORT:
           if (file != NULL) {
-            os << opcode << " " << PrettyField(insn.vC, *file, true) << " // field@" << insn.vB;
+            os << opcode << " v" << insn.vA << ", v" << insn.vB << ", "
+               << PrettyField(insn.vC, *file, true) << " // field@" << insn.vC;
             break;
           }  // else fall-through
         case INSTANCE_OF:
           if (file != NULL) {
-            os << opcode << " " << PrettyType(insn.vC, *file) << " // type@" << insn.vC;
+            os << opcode << " v" << insn.vA << ", v" << insn.vB << ", "
+               << PrettyType(insn.vC, *file) << " // type@" << insn.vC;
             break;
           }
         case NEW_ARRAY:
           if (file != NULL) {
-            os << opcode << " " << PrettyType(insn.vC, *file) << " // type@" << insn.vC;
+            os << opcode << " v" << insn.vA << ", v" << insn.vB << ", "
+               << PrettyType(insn.vC, *file) << " // type@" << insn.vC;
             break;
           }  // else fall-through
         default:
diff --git a/src/gc/space.cc b/src/gc/space.cc
index 9c8819b..274d777 100644
--- a/src/gc/space.cc
+++ b/src/gc/space.cc
@@ -73,8 +73,8 @@
 AllocSpace::AllocSpace(const std::string& name, MemMap* mem_map, void* mspace, byte* begin,
                        byte* end, size_t growth_limit)
     : MemMapSpace(name, mem_map, end - begin, kGcRetentionPolicyAlwaysCollect),
-      num_bytes_allocated_(0), num_objects_allocated_(0),
-      lock_("allocation space lock", kAllocSpaceLock), mspace_(mspace),
+      num_bytes_allocated_(0), num_objects_allocated_(0), total_bytes_allocated_(0),
+      total_objects_allocated_(0), lock_("allocation space lock", kAllocSpaceLock), mspace_(mspace),
       growth_limit_(growth_limit) {
   CHECK(mspace != NULL);
 
@@ -202,7 +202,10 @@
     *reinterpret_cast<word*>(reinterpret_cast<byte*>(result) + AllocationSize(result)
         - sizeof(word) - kChunkOverhead) = kPaddingValue;
   }
-  num_bytes_allocated_ += AllocationSize(result);
+  size_t allocation_size = AllocationSize(result);
+  num_bytes_allocated_ += allocation_size;
+  total_bytes_allocated_ += allocation_size;
+  ++total_objects_allocated_;
   ++num_objects_allocated_;
   return result;
 }
@@ -552,8 +555,8 @@
 
 LargeObjectSpace::LargeObjectSpace(const std::string& name)
     : DiscontinuousSpace(name, kGcRetentionPolicyAlwaysCollect),
-      num_bytes_allocated_(0),
-      num_objects_allocated_(0) {
+      num_bytes_allocated_(0), num_objects_allocated_(0), total_bytes_allocated_(0),
+      total_objects_allocated_(0) {
   live_objects_.reset(new SpaceSetMap("large live objects"));
   mark_objects_.reset(new SpaceSetMap("large marked objects"));
 }
@@ -583,8 +586,11 @@
   Object* obj = reinterpret_cast<Object*>(mem_map->Begin());
   large_objects_.push_back(obj);
   mem_maps_.Put(obj, mem_map);
-  num_bytes_allocated_ += mem_map->Size();
+  size_t allocation_size = mem_map->Size();
+  num_bytes_allocated_ += allocation_size;
+  total_bytes_allocated_ += allocation_size;
   ++num_objects_allocated_;
+  ++total_objects_allocated_;
   return obj;
 }
 
@@ -757,7 +763,9 @@
   }
 
   num_objects_allocated_++;
+  total_objects_allocated_++;
   num_bytes_allocated_ += num_bytes;
+  total_bytes_allocated_ += num_bytes;
   return reinterpret_cast<Object*>(addr);
 }
 
diff --git a/src/gc/space.h b/src/gc/space.h
index a543500..ab29582 100644
--- a/src/gc/space.h
+++ b/src/gc/space.h
@@ -314,6 +314,14 @@
     return num_objects_allocated_;
   }
 
+  size_t GetTotalBytesAllocated() const {
+    return total_bytes_allocated_;
+  }
+
+  size_t GetTotalObjectsAllocated() const {
+    return total_objects_allocated_;
+  }
+
  private:
   Object* AllocWithoutGrowthLocked(size_t num_bytes) EXCLUSIVE_LOCKS_REQUIRED(lock_);
 
@@ -324,6 +332,8 @@
   // Approximate number of bytes which have been allocated into the space.
   size_t num_bytes_allocated_;
   size_t num_objects_allocated_;
+  size_t total_bytes_allocated_;
+  size_t total_objects_allocated_;
 
   static size_t bitmap_index_;
 
@@ -453,6 +463,14 @@
     return num_objects_allocated_;
   }
 
+  size_t GetTotalBytesAllocated() const {
+    return total_bytes_allocated_;
+  }
+
+  size_t GetTotalObjectsAllocated() const {
+    return total_objects_allocated_;
+  }
+
  protected:
 
   LargeObjectSpace(const std::string& name);
@@ -460,6 +478,8 @@
   // Approximate number of bytes which have been allocated into the space.
   size_t num_bytes_allocated_;
   size_t num_objects_allocated_;
+  size_t total_bytes_allocated_;
+  size_t total_objects_allocated_;
 
   UniquePtr<SpaceSetMap> live_objects_;
   UniquePtr<SpaceSetMap> mark_objects_;
diff --git a/src/heap.cc b/src/heap.cc
index 3989a24..d3de603 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -139,8 +139,9 @@
       concurrent_start_bytes_(std::numeric_limits<size_t>::max()),
       concurrent_start_size_(128 * KB),
       concurrent_min_free_(256 * KB),
-      concurrent_gc_start_rate_(3 * MB / 2),
       sticky_gc_count_(0),
+      total_bytes_freed_(0),
+      total_objects_freed_(0),
       large_object_threshold_(3 * kPageSize),
       num_bytes_allocated_(0),
       verify_missing_card_marks_(false),
@@ -152,7 +153,6 @@
       min_alloc_space_size_for_sticky_gc_(2 * MB),
       min_remaining_space_for_sticky_gc_(1 * MB),
       last_trim_time_(0),
-      try_running_gc_(false),
       requesting_gc_(false),
       max_allocation_stack_size_(MB),
       reference_referent_offset_(0),
@@ -161,6 +161,10 @@
       reference_pendingNext_offset_(0),
       finalizer_reference_zombie_offset_(0),
       target_utilization_(0.5),
+      total_paused_time_(0),
+      total_wait_time_(0),
+      measure_allocation_time_(false),
+      total_allocation_time_(0),
       verify_objects_(false) {
   if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
     LOG(INFO) << "Heap() entering";
@@ -320,6 +324,39 @@
   }
 }
 
+void Heap::DumpGcPerformanceInfo() {
+  // Dump cumulative timings.
+  LOG(INFO) << "Dumping cumulative Gc timings";
+  uint64_t total_duration = 0;
+  for (CumulativeTimings::iterator it = cumulative_timings_.begin();
+      it != cumulative_timings_.end(); ++it) {
+    CumulativeLogger* logger = it->second;
+    if (logger->GetTotalNs() != 0) {
+      logger->Dump();
+      total_duration += logger->GetTotalNs();
+    }
+  }
+  uint64_t allocation_time = static_cast<uint64_t>(total_allocation_time_) * kTimeAdjust;
+  size_t total_objects_allocated = GetTotalObjectsAllocated();
+  size_t total_bytes_allocated = GetTotalBytesAllocated();
+  if (total_duration != 0) {
+    const double total_seconds = double(total_duration / 1000) / 1000000.0;
+    LOG(INFO) << "Total time spent in GC: " << PrettyDuration(total_duration);
+    LOG(INFO) << "Mean GC size throughput: "
+              << PrettySize(GetTotalBytesFreed() / total_seconds) << "/s";
+    LOG(INFO) << "Mean GC object throughput: " << GetTotalObjectsFreed() / total_seconds << "/s";
+  }
+  LOG(INFO) << "Total number of allocations: " << total_objects_allocated;
+  LOG(INFO) << "Total bytes allocated " << PrettySize(total_bytes_allocated);
+  if (measure_allocation_time_) {
+    LOG(INFO) << "Total time spent allocating: " << PrettyDuration(allocation_time);
+    LOG(INFO) << "Mean allocation time: "
+              << PrettyDuration(allocation_time / total_objects_allocated);
+  }
+  LOG(INFO) << "Total mutator paused time: " << PrettyDuration(total_paused_time_);
+  LOG(INFO) << "Total waiting for Gc to complete time: " << PrettyDuration(total_wait_time_);
+}
+
 Heap::~Heap() {
   // If we don't reset then the mark stack complains in it's destructor.
   allocation_stack_->Reset();
@@ -377,6 +414,10 @@
 
   Object* obj = NULL;
   size_t size = 0;
+  uint64_t allocation_start = 0;
+  if (measure_allocation_time_) {
+    allocation_start = NanoTime();
+  }
 
   // We need to have a zygote space or else our newly allocated large object can end up in the
   // Zygote resulting in it being prematurely freed.
@@ -385,21 +426,17 @@
   if (byte_count >= large_object_threshold_ && have_zygote_space_ && c->IsPrimitiveArray()) {
     size = RoundUp(byte_count, kPageSize);
     obj = Allocate(self, NULL, size);
-
-    if (obj != NULL) {
-      // Make sure that our large object didn't get placed anywhere within the space interval or else
-      // it breaks the immune range.
-      DCHECK(reinterpret_cast<byte*>(obj) < spaces_.front()->Begin() ||
-             reinterpret_cast<byte*>(obj) >= spaces_.back()->End());
-    }
+    // Make sure that our large object didn't get placed anywhere within the space interval or else
+    // it breaks the immune range.
+    DCHECK(obj == NULL ||
+           reinterpret_cast<byte*>(obj) < spaces_.front()->Begin() ||
+           reinterpret_cast<byte*>(obj) >= spaces_.back()->End());
   } else {
     obj = Allocate(self, alloc_space_, byte_count);
-    size = alloc_space_->AllocationSize(obj);
 
-    if (obj != NULL) {
-      // Additional verification to ensure that we did not allocate into a zygote space.
-      DCHECK(!have_zygote_space_ || !FindSpaceFromObject(obj)->IsZygoteSpace());
-    }
+    // Ensure that we did not allocate into a zygote space.
+    DCHECK(obj == NULL || !have_zygote_space_ || !FindSpaceFromObject(obj)->IsZygoteSpace());
+    size = alloc_space_->AllocationSize(obj);
   }
 
   if (LIKELY(obj != NULL)) {
@@ -422,6 +459,10 @@
     }
     VerifyObject(obj);
 
+    if (measure_allocation_time_) {
+      total_allocation_time_ += (NanoTime() - allocation_start) / kTimeAdjust;
+    }
+
     return obj;
   }
   int64_t total_bytes_free = GetFreeMemory();
@@ -566,8 +607,6 @@
 }
 
 void Heap::RecordFree(size_t freed_objects, size_t freed_bytes) {
-  COMPILE_ASSERT(sizeof(size_t) == sizeof(int32_t),
-                 int32_t_must_be_same_size_as_size_t_for_used_atomic_operations);
   DCHECK_LE(freed_bytes, static_cast<size_t>(num_bytes_allocated_));
   num_bytes_allocated_ -= freed_bytes;
 
@@ -688,18 +727,58 @@
   return TryToAllocate(self, space, alloc_size, true);
 }
 
-int64_t Heap::GetMaxMemory() {
+float Heap::GetTargetHeapUtilization() const {
+  return target_utilization_;
+}
+
+void Heap::SetTargetHeapUtilization(float target) {
+  DCHECK_GT(target, 0.0f);  // asserted in Java code
+  DCHECK_LT(target, 1.0f);
+  target_utilization_ = target;
+}
+
+int64_t Heap::GetMaxMemory() const {
   return growth_limit_;
 }
 
-int64_t Heap::GetTotalMemory() {
+int64_t Heap::GetTotalMemory() const {
   return GetMaxMemory();
 }
 
-int64_t Heap::GetFreeMemory() {
+int64_t Heap::GetFreeMemory() const {
   return GetMaxMemory() - num_bytes_allocated_;
 }
 
+size_t Heap::GetTotalBytesFreed() const {
+  return total_bytes_freed_;
+}
+
+size_t Heap::GetTotalObjectsFreed() const {
+  return total_objects_freed_;
+}
+
+size_t Heap::GetTotalObjectsAllocated() const {
+  size_t total = large_object_space_->GetTotalObjectsAllocated();
+  for (Spaces::const_iterator it = spaces_.begin(); it != spaces_.end(); ++it) {
+    Space* space = *it;
+    if (space->IsAllocSpace()) {
+      total += space->AsAllocSpace()->GetTotalObjectsAllocated();
+    }
+  }
+  return total;
+}
+
+size_t Heap::GetTotalBytesAllocated() const {
+  size_t total = large_object_space_->GetTotalBytesAllocated();
+  for (Spaces::const_iterator it = spaces_.begin(); it != spaces_.end(); ++it) {
+    Space* space = *it;
+    if (space->IsAllocSpace()) {
+      total += space->AsAllocSpace()->GetTotalBytesAllocated();
+    }
+  }
+  return total;
+}
+
 class InstanceCounter {
  public:
   InstanceCounter(Class* c, bool count_assignable, size_t* const count)
@@ -1027,6 +1106,8 @@
 
     cleared_references = mark_sweep.GetClearedReferences();
     bytes_freed = mark_sweep.GetFreedBytes();
+    total_bytes_freed_ += bytes_freed;
+    total_objects_freed_ += mark_sweep.GetFreedObjects();
   }
 
   if (verify_post_gc_heap_) {
@@ -1049,6 +1130,7 @@
 
   // If the GC was slow, then print timings in the log.
   uint64_t duration = (NanoTime() - start_time) / 1000 * 1000;
+  total_paused_time_ += duration / kTimeAdjust;
   if (duration > MsToNs(50)) {
     const size_t percent_free = GetPercentFree();
     const size_t current_heap_size = GetUsedMemorySize();
@@ -1586,15 +1668,12 @@
         // Make sure everything in the live stack isn't something we unmarked.
         std::sort(allocation_stack_->Begin(), allocation_stack_->End());
         for (Object** it = live_stack_->Begin(); it != live_stack_->End(); ++it) {
-          if (std::binary_search(allocation_stack_->Begin(), allocation_stack_->End(), *it)) {
-            LOG(FATAL) << "Unmarked object " << *it << " in the live stack";
-          }
+          DCHECK(!std::binary_search(allocation_stack_->Begin(), allocation_stack_->End(), *it))
+              << "Unmarked object " << *it << " in the live stack";
         }
       } else {
         for (Object** it = allocation_stack_->Begin(); it != allocation_stack_->End(); ++it) {
-          if (GetLiveBitmap()->Test(*it)) {
-            LOG(FATAL) << "Object " << *it << " is marked as live";
-          }
+          DCHECK(!GetLiveBitmap()->Test(*it)) << "Object " << *it << " is marked as live";
         }
       }
 #endif
@@ -1662,12 +1741,16 @@
 
     cleared_references = mark_sweep.GetClearedReferences();
     bytes_freed = mark_sweep.GetFreedBytes();
+    total_bytes_freed_ += bytes_freed;
+    total_objects_freed_ += mark_sweep.GetFreedObjects();
   }
 
   GrowForUtilization();
   timings.AddSplit("GrowForUtilization");
 
   EnqueueClearedReferences(&cleared_references);
+  timings.AddSplit("EnqueueClearedReferences");
+
   RequestHeapTrim();
   timings.AddSplit("Finish");
 
@@ -1675,6 +1758,7 @@
   uint64_t pause_roots = (root_end - root_begin) / 1000 * 1000;
   uint64_t pause_dirty = (dirty_end - dirty_begin) / 1000 * 1000;
   uint64_t duration = (NanoTime() - root_begin) / 1000 * 1000;
+  total_paused_time_ += (pause_roots + pause_dirty) / kTimeAdjust;
   if (pause_roots > MsToNs(5) || pause_dirty > MsToNs(5)) {
     const size_t percent_free = GetPercentFree();
     const size_t current_heap_size = GetUsedMemorySize();
@@ -1706,6 +1790,7 @@
       do_wait = is_gc_running_;
     }
     if (do_wait) {
+      uint64_t wait_time;
       // We must wait, change thread state then sleep on gc_complete_cond_;
       ScopedThreadStateChange tsc(Thread::Current(), kWaitingForGcToComplete);
       {
@@ -1714,8 +1799,9 @@
           gc_complete_cond_->Wait(self, *gc_complete_lock_);
         }
         last_gc_type = last_gc_type_;
+        wait_time = NanoTime() - wait_start;
+        total_wait_time_ += wait_time;
       }
-      uint64_t wait_time = NanoTime() - wait_start;
       if (wait_time > MsToNs(5)) {
         LOG(INFO) << "WaitForConcurrentGcToComplete blocked for " << PrettyDuration(wait_time);
       }
@@ -1727,12 +1813,7 @@
 void Heap::DumpForSigQuit(std::ostream& os) {
   os << "Heap: " << GetPercentFree() << "% free, " << PrettySize(GetUsedMemorySize()) << "/"
      << PrettySize(GetTotalMemory()) << "; " << GetObjectsAllocated() << " objects\n";
-  // Dump cumulative timings.
-  LOG(INFO) << "Dumping cumulative Gc timings";
-  for (CumulativeTimings::iterator it = cumulative_timings_.begin();
-      it != cumulative_timings_.end(); ++it) {
-    it->second->Dump();
-  }
+  DumpGcPerformanceInfo();
 }
 
 size_t Heap::GetPercentFree() {
diff --git a/src/heap.h b/src/heap.h
index 209d631..3d49e30 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -82,8 +82,9 @@
 class Heap {
  public:
   static const size_t kInitialSize = 2 * MB;
-
   static const size_t kMaximumSize = 32 * MB;
+  // Used so that we don't overflow the allocation time atomic integer.
+  static const size_t kTimeAdjust = 1024;
 
   typedef void (RootVisitor)(const Object* root, void* arg);
   typedef bool (IsMarkedTester)(const Object* object, void* arg);
@@ -136,11 +137,11 @@
   void ConcurrentGC(Thread* self) LOCKS_EXCLUDED(Locks::runtime_shutdown_lock_);
 
   // Implements java.lang.Runtime.maxMemory.
-  int64_t GetMaxMemory();
+  int64_t GetMaxMemory() const;
   // Implements java.lang.Runtime.totalMemory.
-  int64_t GetTotalMemory();
+  int64_t GetTotalMemory() const;
   // Implements java.lang.Runtime.freeMemory.
-  int64_t GetFreeMemory();
+  int64_t GetFreeMemory() const;
 
   // Implements VMDebug.countInstancesOfClass.
   int64_t CountInstances(Class* c, bool count_assignable)
@@ -153,16 +154,11 @@
 
   // Target ideal heap utilization ratio, implements
   // dalvik.system.VMRuntime.getTargetHeapUtilization.
-  float GetTargetHeapUtilization() {
-    return target_utilization_;
-  }
+  float GetTargetHeapUtilization() const;
+
   // Set target ideal heap utilization ratio, implements
   // dalvik.system.VMRuntime.setTargetHeapUtilization.
-  void SetTargetHeapUtilization(float target) {
-    DCHECK_GT(target, 0.0f);  // asserted in Java code
-    DCHECK_LT(target, 1.0f);
-    target_utilization_ = target;
-  }
+  void SetTargetHeapUtilization(float target);
 
   // For the alloc space, sets the maximum number of bytes that the heap is allowed to allocate
   // from the system. Doesn't allow the space to exceed its growth limit.
@@ -252,6 +248,18 @@
   size_t GetConcurrentMinFree() const;
   size_t GetUsedMemorySize() const;
 
+  // Returns the total number of objects allocated since the heap was created.
+  size_t GetTotalObjectsAllocated() const;
+
+  // Returns the total number of bytes allocated since the heap was created.
+  size_t GetTotalBytesAllocated() const;
+
+  // Returns the total number of objects freed since the heap was created.
+  size_t GetTotalObjectsFreed() const;
+
+  // Returns the total number of bytes freed since the heap was created.
+  size_t GetTotalBytesFreed() const;
+
   // Functions for getting the bitmap which corresponds to an object's address.
   // This is probably slow, TODO: use better data structure like binary tree .
   ContinuousSpace* FindSpaceFromObject(const Object*) const;
@@ -298,6 +306,9 @@
   // UnReserve the address range where the oat file will be placed.
   void UnReserveOatFileAddressRange();
 
+  // GC performance measuring
+  void DumpGcPerformanceInfo();
+
  private:
   // Allocates uninitialized storage. Passing in a null space tries to place the object in the
   // large object space.
@@ -413,10 +424,11 @@
   size_t concurrent_min_free_;
   // Number of bytes allocated since the last Gc, we use this to help determine when to schedule concurrent GCs.
   size_t bytes_since_last_gc_;
-  // Start a concurrent GC if we have allocated concurrent_gc_start_rate_ bytes and not done a GCs.
-  size_t concurrent_gc_start_rate_;
   size_t sticky_gc_count_;
 
+  size_t total_bytes_freed_;
+  size_t total_objects_freed_;
+
   // Primitive objects larger than this size are put in the large object space.
   size_t large_object_threshold_;
 
@@ -450,11 +462,6 @@
   UniquePtr<HeapBitmap> live_bitmap_ GUARDED_BY(Locks::heap_bitmap_lock_);
   UniquePtr<HeapBitmap> mark_bitmap_ GUARDED_BY(Locks::heap_bitmap_lock_);
 
-  // True while the garbage collector is trying to signal the GC daemon thread.
-  // This flag is needed to prevent recursion from occurring when the JNI calls
-  // allocate memory and request another GC.
-  bool try_running_gc_;
-
   // Used to ensure that we don't ever recursively request GC.
   volatile bool requesting_gc_;
 
@@ -487,6 +494,14 @@
   // Target ideal heap utilization ratio
   float target_utilization_;
 
+  // Total time which mutators are paused or waiting for GC to complete.
+  uint64_t total_paused_time_;
+  uint64_t total_wait_time_;
+
+  // Total time spent allocating, in microseconds (scaled by kTimeAdjust).
+  const bool measure_allocation_time_;
+  AtomicInteger total_allocation_time_;
+
   bool verify_objects_;
 
   friend class MarkSweep;
diff --git a/src/oat/runtime/support_invoke.cc b/src/oat/runtime/support_invoke.cc
index 4656198..e66749d 100644
--- a/src/oat/runtime/support_invoke.cc
+++ b/src/oat/runtime/support_invoke.cc
@@ -27,6 +27,12 @@
   AbstractMethod* method;
   if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex16)) {
     method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
+    if (UNLIKELY(method == NULL)) {
+      FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
+      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
+                                                                 caller_method);
+      return 0;  // Failure.
+    }
   } else {
     FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
     DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
@@ -86,7 +92,7 @@
                                                 false, kInterface);
     if (UNLIKELY(method == NULL)) {
       CHECK(self->IsExceptionPending());
-      return 0;  // failure
+      return 0;  // Failure.
     }
   }
   const void* code = method->GetCode();
diff --git a/src/oatdump.cc b/src/oatdump.cc
index b40eb1c..a592b9f 100644
--- a/src/oatdump.cc
+++ b/src/oatdump.cc
@@ -504,7 +504,7 @@
           } else {
             os << "\t\t\tcatch entry dex PC: 0x";
           }
-          os << std::hex << raw_table[i + 1] << "\n";
+          os << std::hex << raw_table[i + 1] << std::dec << "\n";
           return;
         }
       }
diff --git a/src/object.cc b/src/object.cc
index e8381db..5fdea71 100644
--- a/src/object.cc
+++ b/src/object.cc
@@ -474,10 +474,9 @@
     } else {
       MethodHelper mh(this);
       MethodHelper interface_mh;
-      ObjectArray<InterfaceEntry>* iftable = GetDeclaringClass()->GetIfTable();
-      for (int32_t i = 0; i < iftable->GetLength() && result == NULL; i++) {
-        InterfaceEntry* entry = iftable->Get(i);
-        Class* interface = entry->GetInterface();
+      IfTable* iftable = GetDeclaringClass()->GetIfTable();
+      for (size_t i = 0; i < iftable->Count() && result == NULL; i++) {
+        Class* interface = iftable->GetInterface(i);
         for (size_t j = 0; j < interface->NumVirtualMethods(); ++j) {
           AbstractMethod* interface_method = interface->GetVirtualMethod(j);
           interface_mh.ChangeMethod(interface_method);
@@ -881,9 +880,9 @@
   // recursively all super-interfaces of those interfaces, are listed
   // in iftable_, so we can just do a linear scan through that.
   int32_t iftable_count = GetIfTableCount();
-  ObjectArray<InterfaceEntry>* iftable = GetIfTable();
+  IfTable* iftable = GetIfTable();
   for (int32_t i = 0; i < iftable_count; i++) {
-    if (iftable->Get(i)->GetInterface() == klass) {
+    if (iftable->GetInterface(i) == klass) {
       return true;
     }
   }
@@ -1007,11 +1006,10 @@
   DCHECK(declaring_class->IsInterface()) << PrettyMethod(method);
   // TODO cache to improve lookup speed
   int32_t iftable_count = GetIfTableCount();
-  ObjectArray<InterfaceEntry>* iftable = GetIfTable();
+  IfTable* iftable = GetIfTable();
   for (int32_t i = 0; i < iftable_count; i++) {
-    InterfaceEntry* interface_entry = iftable->Get(i);
-    if (interface_entry->GetInterface() == declaring_class) {
-      return interface_entry->GetMethodArray()->Get(method->GetMethodIndex());
+    if (iftable->GetInterface(i) == declaring_class) {
+      return iftable->GetMethodArray(i)->Get(method->GetMethodIndex());
     }
   }
   return NULL;
@@ -1025,9 +1023,9 @@
   }
 
   int32_t iftable_count = GetIfTableCount();
-  ObjectArray<InterfaceEntry>* iftable = GetIfTable();
+  IfTable* iftable = GetIfTable();
   for (int32_t i = 0; i < iftable_count; i++) {
-    method = iftable->Get(i)->GetInterface()->FindVirtualMethod(name, signature);
+    method = iftable->GetInterface(i)->FindVirtualMethod(name, signature);
     if (method != NULL) {
       return method;
     }
@@ -1043,9 +1041,9 @@
   }
 
   int32_t iftable_count = GetIfTableCount();
-  ObjectArray<InterfaceEntry>* iftable = GetIfTable();
+  IfTable* iftable = GetIfTable();
   for (int32_t i = 0; i < iftable_count; i++) {
-    method = iftable->Get(i)->GetInterface()->FindVirtualMethod(dex_cache, dex_method_idx);
+    method = iftable->GetInterface(i)->FindVirtualMethod(dex_cache, dex_method_idx);
     if (method != NULL) {
       return method;
     }
diff --git a/src/object.h b/src/object.h
index 1af30ea..43aed33 100644
--- a/src/object.h
+++ b/src/object.h
@@ -44,7 +44,7 @@
 class CodeAndDirectMethods;
 class DexCache;
 class Field;
-class InterfaceEntry;
+class IfTable;
 class Monitor;
 class Member;
 class AbstractMethod;
@@ -1173,6 +1173,57 @@
   return new_array;
 }
 
+class MANAGED IfTable : public ObjectArray<Object> {
+ public:
+  Class* GetInterface(int32_t i) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    Class* interface = Get((i * kMax) + kInterface)->AsClass();
+    DCHECK(interface != NULL);
+    return interface;
+  }
+
+  void SetInterface(int32_t i, Class* interface) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  ObjectArray<AbstractMethod>* GetMethodArray(int32_t i) const
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    ObjectArray<AbstractMethod>* method_array =
+        down_cast<ObjectArray<AbstractMethod>*>(Get((i * kMax) + kMethodArray));
+    DCHECK(method_array != NULL);
+    return method_array;
+  }
+
+  size_t GetMethodArrayCount(int32_t i) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    ObjectArray<AbstractMethod>* method_array =
+        down_cast<ObjectArray<AbstractMethod>*>(Get((i * kMax) + kMethodArray));
+    if (method_array == NULL) {
+      return 0;
+    }
+    return method_array->GetLength();
+  }
+
+  void SetMethodArray(int32_t i, ObjectArray<AbstractMethod>* new_ma)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    DCHECK(new_ma != NULL);
+    DCHECK(Get((i * kMax) + kMethodArray) == NULL);
+    Set((i * kMax) + kMethodArray, new_ma);
+  }
+
+  size_t Count() const {
+    return GetLength() / kMax;
+  }
+
+  enum {
+    // Points to the interface class.
+    kInterface   = 0,
+    // Method pointers into the vtable, allow fast map from interface method index to concrete
+    // instance method.
+    kMethodArray = 1,
+    kMax         = 2,
+  };
+
+ private:
+  DISALLOW_IMPLICIT_CONSTRUCTORS(IfTable);
+};
+
 // Type for the InitializedStaticStorage table. Currently the Class
 // provides the static storage. However, this might change to an Array
 // to improve image sharing, so we use this type to avoid assumptions
@@ -1746,20 +1797,18 @@
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   int32_t GetIfTableCount() const {
-    ObjectArray<InterfaceEntry>* iftable = GetIfTable();
+    IfTable* iftable = GetIfTable();
     if (iftable == NULL) {
       return 0;
     }
-    return iftable->GetLength();
+    return iftable->Count();
   }
 
-  ObjectArray<InterfaceEntry>* GetIfTable() const {
-    DCHECK(IsResolved() || IsErroneous());
-    return GetFieldObject<ObjectArray<InterfaceEntry>*>(
-        OFFSET_OF_OBJECT_MEMBER(Class, iftable_), false);
+  IfTable* GetIfTable() const {
+    return GetFieldObject<IfTable*>(OFFSET_OF_OBJECT_MEMBER(Class, iftable_), false);
   }
 
-  void SetIfTable(ObjectArray<InterfaceEntry>* new_iftable)
+  void SetIfTable(IfTable* new_iftable)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     SetFieldObject(OFFSET_OF_OBJECT_MEMBER(Class, iftable_), new_iftable, false);
   }
@@ -1970,20 +2019,18 @@
   // specifies the number of reference fields.
   ObjectArray<Field>* ifields_;
 
-  // Interface table (iftable_), one entry per interface supported by
-  // this class.  That means one entry for each interface we support
-  // directly, indirectly via superclass, or indirectly via
-  // superinterface.  This will be null if neither we nor our
-  // superclass implement any interfaces.
+  // The interface table (iftable_) contains pairs of a interface class and an array of the
+  // interface methods. There is one pair per interface supported by this class.  That means one
+  // pair for each interface we support directly, indirectly via superclass, or indirectly via a
+  // superinterface.  This will be null if neither we nor our superclass implement any interfaces.
   //
-  // Why we need this: given "class Foo implements Face", declare
-  // "Face faceObj = new Foo()".  Invoke faceObj.blah(), where "blah"
-  // is part of the Face interface.  We can't easily use a single
-  // vtable.
+  // Why we need this: given "class Foo implements Face", declare "Face faceObj = new Foo()".
+  // Invoke faceObj.blah(), where "blah" is part of the Face interface.  We can't easily use a
+  // single vtable.
   //
-  // For every interface a concrete class implements, we create an array
-  // of the concrete vtable_ methods for the methods in the interface.
-  ObjectArray<InterfaceEntry>* iftable_;
+  // For every interface a concrete class implements, we create an array of the concrete vtable_
+  // methods for the methods in the interface.
+  IfTable* iftable_;
 
   // descriptor for the class such as "java.lang.Class" or "[C". Lazily initialized by ComputeName
   String* name_;
@@ -2246,6 +2293,13 @@
   }
 }
 
+inline void IfTable::SetInterface(int32_t i, Class* interface) {
+  DCHECK(interface != NULL);
+  DCHECK(interface->IsInterface());
+  DCHECK(Get((i * kMax) + kInterface) == NULL);
+  Set((i * kMax) + kInterface, interface);
+}
+
 class MANAGED ClassClass : public Class {
  private:
   int32_t padding_;
@@ -2648,60 +2702,6 @@
   DISALLOW_IMPLICIT_CONSTRUCTORS(StackTraceElement);
 };
 
-class MANAGED InterfaceEntry : public ObjectArray<Object> {
- public:
-  Class* GetInterface() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    Class* interface = Get(kInterface)->AsClass();
-    DCHECK(interface != NULL);
-    return interface;
-  }
-
-  void SetInterface(Class* interface) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    DCHECK(interface != NULL);
-    DCHECK(interface->IsInterface());
-    DCHECK(Get(kInterface) == NULL);
-    Set(kInterface, interface);
-  }
-
-  size_t GetMethodArrayCount() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    ObjectArray<AbstractMethod>* method_array = down_cast<ObjectArray<AbstractMethod>*>(Get(kMethodArray));
-    if (method_array == NULL) {
-      return 0;
-    }
-    return method_array->GetLength();
-  }
-
-  ObjectArray<AbstractMethod>* GetMethodArray() const
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    ObjectArray<AbstractMethod>* method_array = down_cast<ObjectArray<AbstractMethod>*>(Get(kMethodArray));
-    DCHECK(method_array != NULL);
-    return method_array;
-  }
-
-  void SetMethodArray(ObjectArray<AbstractMethod>* new_ma)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    DCHECK(new_ma != NULL);
-    DCHECK(Get(kMethodArray) == NULL);
-    Set(kMethodArray, new_ma);
-  }
-
-  static size_t LengthAsArray() {
-    return kMax;
-  }
-
- private:
-  enum {
-    // Points to the interface class.
-    kInterface   = 0,
-    // Method pointers into the vtable, allow fast map from interface
-    // method index to concrete instance method.
-    kMethodArray = 1,
-    kMax         = 2,
-  };
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(InterfaceEntry);
-};
-
 class MANAGED SynthesizedProxyClass : public Class {
  public:
   ObjectArray<Class>* GetInterfaces() {
diff --git a/src/object_utils.h b/src/object_utils.h
index 1bbb7bc..c6e71c3 100644
--- a/src/object_utils.h
+++ b/src/object_utils.h
@@ -169,7 +169,7 @@
         return GetClassLinker()->FindSystemClass("Ljava/io/Serializable;");
       }
     } else if (klass_->IsProxyClass()) {
-      return klass_->GetIfTable()->Get(idx)->GetInterface();
+      return klass_->GetIfTable()->GetInterface(idx);
     } else {
       uint16_t type_idx = GetDirectInterfaceTypeIdx(idx);
       Class* interface = GetDexCache()->GetResolvedType(type_idx);
diff --git a/src/runtime_support.h b/src/runtime_support.h
index 0d24e48..5a5cdcd 100644
--- a/src/runtime_support.h
+++ b/src/runtime_support.h
@@ -74,7 +74,8 @@
       return NULL;  // Failure
     }
   }
-  if (!runtime->GetClassLinker()->EnsureInitialized(klass, true, true)) {
+  if (!klass->IsInitialized() &&
+      !runtime->GetClassLinker()->EnsureInitialized(klass, true, true)) {
     DCHECK(self->IsExceptionPending());
     return NULL;  // Failure
   }
diff --git a/src/timing_logger.h b/src/timing_logger.h
index 5bc9d71..7dc2671 100644
--- a/src/timing_logger.h
+++ b/src/timing_logger.h
@@ -170,7 +170,7 @@
          << FormatDuration(std_dev * kAdjust, tu) << " " << labels_[i] << "\n";
     }
     uint64_t total_mean_x2 = total_time_squared_;
-    uint64_t mean_total_ns = GetTotalNs();
+    uint64_t mean_total_ns = GetTotalTime();
     if (iterations_ != 0) {
       total_mean_x2 /= iterations_;
       mean_total_ns /= iterations_;
@@ -183,6 +183,12 @@
   }
 
   uint64_t GetTotalNs() const {
+    return GetTotalTime() * kAdjust;
+  }
+
+ private:
+
+  uint64_t GetTotalTime() const {
     uint64_t total = 0;
     for (size_t i = 0; i < times_.size(); ++i) {
       total += times_[i];
@@ -190,7 +196,6 @@
     return total;
   }
 
- private:
   static const uint64_t kAdjust = 1000;
   std::string name_;
   bool precise_;
diff --git a/src/verifier/method_verifier.cc b/src/verifier/method_verifier.cc
index ad238b8..67507bc 100644
--- a/src/verifier/method_verifier.cc
+++ b/src/verifier/method_verifier.cc
@@ -1054,7 +1054,8 @@
     os << "Native method\n";
     return;
   }
-  DCHECK(code_item_ != NULL);
+  reg_types_.Dump(os);
+  os << "Dumping instructions and register lines:\n";
   const Instruction* inst = Instruction::At(code_item_->insns_);
   for (size_t dex_pc = 0; dex_pc < code_item_->insns_size_in_code_units_;
       dex_pc += insn_flags_[dex_pc].GetLengthInCodeUnits()) {
@@ -1128,7 +1129,7 @@
         // it's effectively considered initialized the instant we reach here (in the sense that we
         // can return without doing anything or call virtual methods).
         {
-          const RegType& reg_type = reg_types_.FromDescriptor(class_loader_, descriptor);
+          const RegType& reg_type = reg_types_.FromDescriptor(class_loader_, descriptor, false);
           reg_line->SetRegisterType(arg_start + cur_arg, reg_type);
         }
         break;
@@ -1527,7 +1528,8 @@
       const RegType& res_type = ResolveClassAndCheckAccess(dec_insn.vB);
       // Register holds class, ie its type is class, on error it will hold Conflict.
       work_line_->SetRegisterType(dec_insn.vA,
-                                  res_type.IsConflict() ? res_type : reg_types_.JavaLangClass());
+                                  res_type.IsConflict() ? res_type
+                                                        : reg_types_.JavaLangClass(true));
       break;
     }
     case Instruction::MONITOR_ENTER:
@@ -1667,7 +1669,7 @@
       break;
     case Instruction::THROW: {
       const RegType& res_type = work_line_->GetRegisterType(dec_insn.vA);
-      if (!reg_types_.JavaLangThrowable().IsAssignableFrom(res_type)) {
+      if (!reg_types_.JavaLangThrowable(false).IsAssignableFrom(res_type)) {
         Fail(VERIFY_ERROR_BAD_CLASS_SOFT) << "thrown class " << res_type << " not instanceof Throwable";
       }
       break;
@@ -1785,7 +1787,7 @@
       VerifyAGet(dec_insn, reg_types_.Long(), true);
       break;
     case Instruction::AGET_OBJECT:
-      VerifyAGet(dec_insn, reg_types_.JavaLangObject(), false);
+      VerifyAGet(dec_insn, reg_types_.JavaLangObject(false), false);
       break;
 
     case Instruction::APUT_BOOLEAN:
@@ -1807,7 +1809,7 @@
       VerifyAPut(dec_insn, reg_types_.Long(), true);
       break;
     case Instruction::APUT_OBJECT:
-      VerifyAPut(dec_insn, reg_types_.JavaLangObject(), false);
+      VerifyAPut(dec_insn, reg_types_.JavaLangObject(false), false);
       break;
 
     case Instruction::IGET_BOOLEAN:
@@ -1829,7 +1831,7 @@
       VerifyISGet(dec_insn, reg_types_.Long(), true, false);
       break;
     case Instruction::IGET_OBJECT:
-      VerifyISGet(dec_insn, reg_types_.JavaLangObject(), false, false);
+      VerifyISGet(dec_insn, reg_types_.JavaLangObject(false), false, false);
       break;
 
     case Instruction::IPUT_BOOLEAN:
@@ -1851,7 +1853,7 @@
       VerifyISPut(dec_insn, reg_types_.Long(), true, false);
       break;
     case Instruction::IPUT_OBJECT:
-      VerifyISPut(dec_insn, reg_types_.JavaLangObject(), false, false);
+      VerifyISPut(dec_insn, reg_types_.JavaLangObject(false), false, false);
       break;
 
     case Instruction::SGET_BOOLEAN:
@@ -1873,7 +1875,7 @@
       VerifyISGet(dec_insn, reg_types_.Long(), true, true);
       break;
     case Instruction::SGET_OBJECT:
-      VerifyISGet(dec_insn, reg_types_.JavaLangObject(), false, true);
+      VerifyISGet(dec_insn, reg_types_.JavaLangObject(false), false, true);
       break;
 
     case Instruction::SPUT_BOOLEAN:
@@ -1895,7 +1897,7 @@
       VerifyISPut(dec_insn, reg_types_.Long(), true, true);
       break;
     case Instruction::SPUT_OBJECT:
-      VerifyISPut(dec_insn, reg_types_.JavaLangObject(), false, true);
+      VerifyISPut(dec_insn, reg_types_.JavaLangObject(false), false, true);
       break;
 
     case Instruction::INVOKE_VIRTUAL:
@@ -1916,7 +1918,7 @@
       } else {
         descriptor = MethodHelper(called_method).GetReturnTypeDescriptor();
       }
-      const RegType& return_type = reg_types_.FromDescriptor(class_loader_, descriptor);
+      const RegType& return_type = reg_types_.FromDescriptor(class_loader_, descriptor, false);
       work_line_->SetResultRegisterType(return_type);
       just_set_result = true;
       break;
@@ -1977,7 +1979,8 @@
          */
         work_line_->MarkRefsAsInitialized(this_type);
       }
-      const RegType& return_type = reg_types_.FromDescriptor(class_loader_, return_type_descriptor);
+      const RegType& return_type = reg_types_.FromDescriptor(class_loader_, return_type_descriptor,
+                                                             false);
       work_line_->SetResultRegisterType(return_type);
       just_set_result = true;
       break;
@@ -1995,7 +1998,7 @@
         } else {
           descriptor = MethodHelper(called_method).GetReturnTypeDescriptor();
         }
+        const RegType& return_type = reg_types_.FromDescriptor(class_loader_, descriptor, false);
+        const RegType& return_type =  reg_types_.FromDescriptor(class_loader_, descriptor, false);
         work_line_->SetResultRegisterType(return_type);
         just_set_result = true;
       }
@@ -2045,7 +2048,7 @@
       } else {
         descriptor = MethodHelper(abs_method).GetReturnTypeDescriptor();
       }
-      const RegType& return_type = reg_types_.FromDescriptor(class_loader_, descriptor);
+      const RegType& return_type = reg_types_.FromDescriptor(class_loader_, descriptor, false);
       work_line_->SetResultRegisterType(return_type);
       work_line_->SetResultRegisterType(return_type);
       just_set_result = true;
@@ -2466,8 +2469,8 @@
   const RegType& referrer = GetDeclaringClass();
   Class* klass = dex_cache_->GetResolvedType(class_idx);
   const RegType& result =
-      klass != NULL ? reg_types_.FromClass(klass)
-                    : reg_types_.FromDescriptor(class_loader_, descriptor);
+      klass != NULL ? reg_types_.FromClass(klass, klass->IsFinal())
+                    : reg_types_.FromDescriptor(class_loader_, descriptor, false);
   if (result.IsConflict()) {
     Fail(VERIFY_ERROR_BAD_CLASS_SOFT) << "accessing broken descriptor '" << descriptor
         << "' in " << referrer;
@@ -2495,14 +2498,14 @@
       for (; iterator.HasNext(); iterator.Next()) {
         if (iterator.GetHandlerAddress() == (uint32_t) work_insn_idx_) {
           if (iterator.GetHandlerTypeIndex() == DexFile::kDexNoIndex16) {
-            common_super = &reg_types_.JavaLangThrowable();
+            common_super = &reg_types_.JavaLangThrowable(false);
           } else {
             const RegType& exception = ResolveClassAndCheckAccess(iterator.GetHandlerTypeIndex());
             if (common_super == NULL) {
               // Unconditionally assign for the first handler. We don't assert this is a Throwable
               // as that is caught at runtime
               common_super = &exception;
-            } else if (!reg_types_.JavaLangThrowable().IsAssignableFrom(exception)) {
+            } else if (!reg_types_.JavaLangThrowable(false).IsAssignableFrom(exception)) {
               // We don't know enough about the type and the common path merge will result in
               // Conflict. Fail here knowing the correct thing can be done at runtime.
               Fail(VERIFY_ERROR_BAD_CLASS_SOFT) << "unexpected non-exception class " << exception;
@@ -2511,7 +2514,7 @@
               // odd case, but nothing to do
             } else {
               common_super = &common_super->Merge(exception, &reg_types_);
-              CHECK(reg_types_.JavaLangThrowable().IsAssignableFrom(*common_super));
+              CHECK(reg_types_.JavaLangThrowable(false).IsAssignableFrom(*common_super));
             }
           }
         }
@@ -2677,7 +2680,8 @@
       return NULL;
     }
     if (method_type != METHOD_INTERFACE && !actual_arg_type.IsZero()) {
-      const RegType& res_method_class = reg_types_.FromClass(res_method->GetDeclaringClass());
+      Class* klass = res_method->GetDeclaringClass();
+      const RegType& res_method_class = reg_types_.FromClass(klass, klass->IsFinal());
       if (!res_method_class.IsAssignableFrom(actual_arg_type)) {
         Fail(VERIFY_ERROR_BAD_CLASS_SOFT) << "'this' argument '" << actual_arg_type
             << "' not instance of '" << res_method_class << "'";
@@ -2707,7 +2711,7 @@
           << " missing signature component";
       return NULL;
     }
-    const RegType& reg_type = reg_types_.FromDescriptor(class_loader_, descriptor);
+    const RegType& reg_type = reg_types_.FromDescriptor(class_loader_, descriptor, false);
     uint32_t get_reg = is_range ? dec_insn.vC + actual_args : dec_insn.arg[actual_args];
     if (!work_line_->VerifyRegisterType(get_reg, reg_type)) {
       return res_method;
@@ -2904,7 +2908,8 @@
     // Cannot infer and check type, however, access will cause null pointer exception
     return field;
   } else {
-    const RegType& field_klass = reg_types_.FromClass(field->GetDeclaringClass());
+    Class* klass = field->GetDeclaringClass();
+    const RegType& field_klass = reg_types_.FromClass(klass, klass->IsFinal());
     if (obj_type.IsUninitializedTypes() &&
         (!IsConstructor() || GetDeclaringClass().Equals(obj_type) ||
             !field_klass.Equals(GetDeclaringClass()))) {
@@ -2947,7 +2952,7 @@
     descriptor = dex_file_->GetFieldTypeDescriptor(field_id);
     loader = class_loader_;
   }
-  const RegType& field_type = reg_types_.FromDescriptor(loader, descriptor);
+  const RegType& field_type = reg_types_.FromDescriptor(loader, descriptor, false);
   if (is_primitive) {
     if (field_type.Equals(insn_type) ||
         (field_type.IsFloat() && insn_type.IsIntegralTypes()) ||
@@ -2996,7 +3001,7 @@
     descriptor = dex_file_->GetFieldTypeDescriptor(field_id);
     loader = class_loader_;
   }
-  const RegType& field_type = reg_types_.FromDescriptor(loader, descriptor);
+  const RegType& field_type = reg_types_.FromDescriptor(loader, descriptor, false);
   if (field != NULL) {
     if (field->IsFinal() && field->GetDeclaringClass() != GetDeclaringClass().GetClass()) {
       Fail(VERIFY_ERROR_ACCESS_FIELD) << "cannot modify final field " << PrettyField(field)
@@ -3104,16 +3109,17 @@
   const DexFile::ProtoId& proto_id = dex_file_->GetMethodPrototype(method_id);
   uint16_t return_type_idx = proto_id.return_type_idx_;
   const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(return_type_idx));
-  return reg_types_.FromDescriptor(class_loader_, descriptor);
+  return reg_types_.FromDescriptor(class_loader_, descriptor, false);
 }
 
 const RegType& MethodVerifier::GetDeclaringClass() {
   if (foo_method_ != NULL) {
-    return reg_types_.FromClass(foo_method_->GetDeclaringClass());
+    Class* klass = foo_method_->GetDeclaringClass();
+    return reg_types_.FromClass(klass, klass->IsFinal());
   } else {
     const DexFile::MethodId& method_id = dex_file_->GetMethodId(method_idx_);
     const char* descriptor = dex_file_->GetTypeDescriptor(dex_file_->GetTypeId(method_id.class_idx_));
-    return reg_types_.FromDescriptor(class_loader_, descriptor);
+    return reg_types_.FromDescriptor(class_loader_, descriptor, false);
   }
 }
 
diff --git a/src/verifier/reg_type.cc b/src/verifier/reg_type.cc
index f555223..e02fbf4 100644
--- a/src/verifier/reg_type.cc
+++ b/src/verifier/reg_type.cc
@@ -46,11 +46,12 @@
     "Unresolved Merged References",
     "Unresolved Super Class",
     "Reference",
+    "Precise Reference",
 };
 
 std::string RegType::Dump(const RegTypeCache* reg_types) const {
-  DCHECK(type_ >=  kRegTypeUndefined && type_ <= kRegTypeReference);
-  DCHECK(arraysize(type_strings) == (kRegTypeReference + 1));
+  DCHECK(type_ >=  kRegTypeUndefined && type_ <= kRegTypePreciseReference);
+  DCHECK(arraysize(type_strings) == (kRegTypePreciseReference + 1));
   std::string result;
   if (IsUnresolvedMergedReference()) {
     if (reg_types == NULL) {
@@ -142,7 +143,7 @@
   if (!IsUnresolvedTypes()) {
     Class* super_klass = GetClass()->GetSuperClass();
     if (super_klass != NULL) {
-      return cache->FromClass(super_klass);
+      return cache->FromClass(super_klass, IsPreciseReference());
     } else {
       return cache->Zero();
     }
@@ -150,7 +151,7 @@
     if (!IsUnresolvedMergedReference() && !IsUnresolvedSuperClass() &&
         GetDescriptor()->CharAt(0) == '[') {
       // Super class of all arrays is Object.
-      return cache->JavaLangObject();
+      return cache->JavaLangObject(true);
     } else {
       return cache->FromUnresolvedSuperClass(*this);
     }
@@ -301,7 +302,7 @@
     if (IsZero() || incoming_type.IsZero()) {
       return SelectNonConstant(*this, incoming_type);  // 0 MERGE ref => ref
     } else if (IsJavaLangObject() || incoming_type.IsJavaLangObject()) {
-      return reg_types->JavaLangObject();  // Object MERGE ref => Object
+      return reg_types->JavaLangObject(false);  // Object MERGE ref => Object
     } else if (IsUnresolvedTypes() || incoming_type.IsUnresolvedTypes()) {
       // We know how to merge an unresolved type with itself, 0 or Object. In this case we
       // have two sub-classes and don't know how to merge. Create a new string-based unresolved
@@ -319,12 +320,12 @@
       DCHECK(c1 != NULL && !c1->IsPrimitive());
       DCHECK(c2 != NULL && !c2->IsPrimitive());
       Class* join_class = ClassJoin(c1, c2);
-      if (c1 == join_class) {
+      if (c1 == join_class && !IsPreciseReference()) {
         return *this;
-      } else if (c2 == join_class) {
+      } else if (c2 == join_class && !incoming_type.IsPreciseReference()) {
         return incoming_type;
       } else {
-        return reg_types->FromClass(join_class);
+        return reg_types->FromClass(join_class, false);
       }
     }
   } else {
diff --git a/src/verifier/reg_type.h b/src/verifier/reg_type.h
index 3064f30..205867d 100644
--- a/src/verifier/reg_type.h
+++ b/src/verifier/reg_type.h
@@ -65,6 +65,7 @@
     kRegTypeUnresolvedMergedReference,  // Tree of merged references (at least 1 is unresolved).
     kRegTypeUnresolvedSuperClass,       // Super class of an unresolved type.
     kRegTypeReference,                  // Reference type.
+    kRegTypePreciseReference,           // Precisely the given type.
   };
 
   Type GetType() const {
@@ -93,6 +94,8 @@
   bool IsUnresolvedMergedReference() const {  return type_ == kRegTypeUnresolvedMergedReference; }
   bool IsUnresolvedSuperClass() const {  return type_ == kRegTypeUnresolvedSuperClass; }
   bool IsReference() const { return type_ == kRegTypeReference; }
+  bool IsPreciseReference() const { return type_ == kRegTypePreciseReference; }
+
   bool IsUninitializedTypes() const {
     return IsUninitializedReference() || IsUninitializedThisReference() ||
         IsUnresolvedAndUninitializedReference() || IsUnresolvedAndUninitializedThisReference();
@@ -154,7 +157,7 @@
   }
 
   bool IsNonZeroReferenceTypes() const {
-    return IsReference() || IsUnresolvedReference() ||
+    return IsReference() || IsPreciseReference() || IsUnresolvedReference() ||
         IsUninitializedReference() || IsUninitializedThisReference() ||
         IsUnresolvedAndUninitializedReference() || IsUnresolvedAndUninitializedThisReference() ||
         IsUnresolvedMergedReference() || IsUnresolvedSuperClass();
@@ -198,6 +201,10 @@
     return allocation_pc_or_constant_or_merged_types_;
   }
 
+  bool HasClass() const {
+    return IsReference() || IsPreciseReference();
+  }
+
   Class* GetClass() const {
     DCHECK(!IsUnresolvedReference());
     DCHECK(klass_or_descriptor_ != NULL);
@@ -212,7 +219,7 @@
   bool IsArrayTypes() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     if (IsUnresolvedTypes() && !IsUnresolvedMergedReference() && !IsUnresolvedSuperClass()) {
       return GetDescriptor()->CharAt(0) == '[';
-    } else if (IsReference()) {
+    } else if (HasClass()) {
       return GetClass()->IsArrayClass();
     } else {
       return false;
@@ -224,7 +231,7 @@
       // Primitive arrays will always resolve
       DCHECK(GetDescriptor()->CharAt(1) == 'L' || GetDescriptor()->CharAt(1) == '[');
       return GetDescriptor()->CharAt(0) == '[';
-    } else if (IsReference()) {
+    } else if (HasClass()) {
       Class* type = GetClass();
       return type->IsArrayClass() && !type->GetComponentType()->IsPrimitive();
     } else {
@@ -256,7 +263,7 @@
   }
 
   bool IsJavaLangObjectArray() const {
-    if (IsReference()) {
+    if (HasClass()) {
       Class* type = GetClass();
       return type->IsArrayClass() && type->GetComponentType()->IsObjectClass();
     }
diff --git a/src/verifier/reg_type_cache.cc b/src/verifier/reg_type_cache.cc
index 37086c9..847cde9 100644
--- a/src/verifier/reg_type_cache.cc
+++ b/src/verifier/reg_type_cache.cc
@@ -57,11 +57,18 @@
   }
 }
 
-const RegType& RegTypeCache::FromDescriptor(ClassLoader* loader, const char* descriptor) {
-  return From(RegTypeFromDescriptor(descriptor), loader, descriptor);
+const RegType& RegTypeCache::FromDescriptor(ClassLoader* loader, const char* descriptor,
+                                            bool precise) {
+  return From(RegTypeFromDescriptor(descriptor), loader, descriptor, precise);
 }
 
-const RegType& RegTypeCache::From(RegType::Type type, ClassLoader* loader, const char* descriptor) {
+static bool MatchingPrecisionForClass(RegType* entry, bool precise)
+    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+  return (entry->IsPreciseReference() == precise) || (entry->GetClass()->IsFinal() && !precise);
+}
+
+const RegType& RegTypeCache::From(RegType::Type type, ClassLoader* loader, const char* descriptor,
+                                  bool precise) {
   if (type <= RegType::kRegTypeLastFixedLocation) {
     // entries should be sized greater than primitive types
     DCHECK_GT(entries_.size(), static_cast<size_t>(type));
@@ -76,14 +83,15 @@
     }
     return *entry;
   } else {
-    DCHECK(type == RegType::kRegTypeReference);
+    DCHECK(type == RegType::kRegTypeReference || type == RegType::kRegTypePreciseReference);
     ClassHelper kh;
     for (size_t i = RegType::kRegTypeLastFixedLocation + 1; i < entries_.size(); i++) {
       RegType* cur_entry = entries_[i];
       // check resolved and unresolved references, ignore uninitialized references
-      if (cur_entry->IsReference()) {
+      if (cur_entry->HasClass()) {
         kh.ChangeClass(cur_entry->GetClass());
-        if (strcmp(descriptor, kh.GetDescriptor()) == 0) {
+        if (MatchingPrecisionForClass(cur_entry, precise) &&
+            (strcmp(descriptor, kh.GetDescriptor()) == 0)) {
           return *cur_entry;
         }
       } else if (cur_entry->IsUnresolvedReference() &&
@@ -93,8 +101,11 @@
     }
     Class* klass = Runtime::Current()->GetClassLinker()->FindClass(descriptor, loader);
     if (klass != NULL) {
-      // Able to resolve so create resolved register type
-      RegType* entry = new RegType(type, klass, 0, entries_.size());
+      // Able to resolve so create resolved register type that is precise if we
+      // know the type is final.
+      RegType* entry = new RegType(klass->IsFinal() ? RegType::kRegTypePreciseReference
+                                                    : RegType::kRegTypeReference,
+                                   klass, 0, entries_.size());
       entries_.push_back(entry);
       return *entry;
     } else {
@@ -119,7 +130,7 @@
   }
 }
 
-const RegType& RegTypeCache::FromClass(Class* klass) {
+const RegType& RegTypeCache::FromClass(Class* klass, bool precise) {
   if (klass->IsPrimitive()) {
     RegType::Type type = RegTypeFromPrimitiveType(klass->GetPrimitiveType());
     // entries should be sized greater than primitive types
@@ -133,11 +144,14 @@
   } else {
     for (size_t i = RegType::kRegTypeLastFixedLocation + 1; i < entries_.size(); i++) {
       RegType* cur_entry = entries_[i];
-      if (cur_entry->IsReference() && cur_entry->GetClass() == klass) {
+      if ((cur_entry->HasClass()) &&
+          MatchingPrecisionForClass(cur_entry, precise) && cur_entry->GetClass() == klass) {
         return *cur_entry;
       }
     }
-    RegType* entry = new RegType(RegType::kRegTypeReference, klass, 0, entries_.size());
+    RegType* entry = new RegType(precise ? RegType::kRegTypePreciseReference
+                                         : RegType::kRegTypeReference,
+                                 klass, 0, entries_.size());
     entries_.push_back(entry);
     return *entry;
   }
@@ -243,11 +257,11 @@
     Class* klass = uninit_type.GetClass();
     for (size_t i = RegType::kRegTypeLastFixedLocation + 1; i < entries_.size(); i++) {
       RegType* cur_entry = entries_[i];
-      if (cur_entry->IsReference() && cur_entry->GetClass() == klass) {
+      if (cur_entry->IsPreciseReference() && cur_entry->GetClass() == klass) {
         return *cur_entry;
       }
     }
-    entry = new RegType(RegType::kRegTypeReference, klass, 0, entries_.size());
+    entry = new RegType(RegType::kRegTypePreciseReference, klass, 0, entries_.size());
   }
   entries_.push_back(entry);
   return *entry;
@@ -284,17 +298,17 @@
 const RegType& RegTypeCache::FromType(RegType::Type type) {
   CHECK(type < RegType::kRegTypeReference);
   switch (type) {
-    case RegType::kRegTypeBoolean:  return From(type, NULL, "Z");
-    case RegType::kRegTypeByte:     return From(type, NULL, "B");
-    case RegType::kRegTypeShort:    return From(type, NULL, "S");
-    case RegType::kRegTypeChar:     return From(type, NULL, "C");
-    case RegType::kRegTypeInteger:  return From(type, NULL, "I");
-    case RegType::kRegTypeFloat:    return From(type, NULL, "F");
+    case RegType::kRegTypeBoolean:  return From(type, NULL, "Z", true);
+    case RegType::kRegTypeByte:     return From(type, NULL, "B", true);
+    case RegType::kRegTypeShort:    return From(type, NULL, "S", true);
+    case RegType::kRegTypeChar:     return From(type, NULL, "C", true);
+    case RegType::kRegTypeInteger:  return From(type, NULL, "I", true);
+    case RegType::kRegTypeFloat:    return From(type, NULL, "F", true);
     case RegType::kRegTypeLongLo:
-    case RegType::kRegTypeLongHi:   return From(type, NULL, "J");
+    case RegType::kRegTypeLongHi:   return From(type, NULL, "J", true);
     case RegType::kRegTypeDoubleLo:
-    case RegType::kRegTypeDoubleHi: return From(type, NULL, "D");
-    default:                        return From(type, NULL, "");
+    case RegType::kRegTypeDoubleHi: return From(type, NULL, "D", true);
+    default:                        return From(type, NULL, "", true);
   }
 }
 
@@ -315,9 +329,20 @@
   if (array.IsUnresolvedTypes()) {
     std::string descriptor(array.GetDescriptor()->ToModifiedUtf8());
     std::string component(descriptor.substr(1, descriptor.size() - 1));
-    return FromDescriptor(loader, component.c_str());
+    return FromDescriptor(loader, component.c_str(), false);
   } else {
-    return FromClass(array.GetClass()->GetComponentType());
+    Class* klass = array.GetClass()->GetComponentType();
+    return FromClass(klass, klass->IsFinal());
+  }
+}
+
+void RegTypeCache::Dump(std::ostream& os) {
+  os << "Register Types:\n";
+  for (size_t i = 0; i < entries_.size(); i++) {
+    RegType* cur_entry = entries_[i];
+    if (cur_entry != NULL) {
+      os << "\t" << i << ": " << cur_entry->Dump() << "\n";
+    }
   }
 }
 
diff --git a/src/verifier/reg_type_cache.h b/src/verifier/reg_type_cache.h
index 5a2c49c..36fd2c7 100644
--- a/src/verifier/reg_type_cache.h
+++ b/src/verifier/reg_type_cache.h
@@ -40,12 +40,12 @@
     return *result;
   }
 
-  const RegType& From(RegType::Type type, ClassLoader* loader, const char* descriptor)
+  const RegType& From(RegType::Type type, ClassLoader* loader, const char* descriptor, bool precise)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-  const RegType& FromClass(Class* klass)
+  const RegType& FromClass(Class* klass, bool precise)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   const RegType& FromCat1Const(int32_t value);
-  const RegType& FromDescriptor(ClassLoader* loader, const char* descriptor)
+  const RegType& FromDescriptor(ClassLoader* loader, const char* descriptor, bool precise)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   const RegType& FromType(RegType::Type)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -77,17 +77,24 @@
     return FromType(RegType::kRegTypeDoubleLo);
   }
 
-  const RegType& JavaLangClass() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return From(RegType::kRegTypeReference, NULL, "Ljava/lang/Class;");
+  const RegType& JavaLangClass(bool precise) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    return From(precise ? RegType::kRegTypePreciseReference
+                        : RegType::kRegTypeReference,
+                NULL, "Ljava/lang/Class;", precise);
   }
-  const RegType& JavaLangObject() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return From(RegType::kRegTypeReference, NULL, "Ljava/lang/Object;");
+  const RegType& JavaLangObject(bool precise) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    return From(precise ? RegType::kRegTypePreciseReference
+                        : RegType::kRegTypeReference,
+                NULL, "Ljava/lang/Object;", precise);
   }
   const RegType& JavaLangString() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return From(RegType::kRegTypeReference, NULL, "Ljava/lang/String;");
+    // String is final and therefore always precise.
+    return From(RegType::kRegTypePreciseReference, NULL, "Ljava/lang/String;", true);
   }
-  const RegType& JavaLangThrowable() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
-    return From(RegType::kRegTypeReference, NULL, "Ljava/lang/Throwable;");
+  const RegType& JavaLangThrowable(bool precise) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    return From(precise ? RegType::kRegTypePreciseReference
+                        : RegType::kRegTypeReference,
+                NULL, "Ljava/lang/Throwable;", precise);
   }
 
   const RegType& Undefined() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
@@ -118,6 +125,8 @@
   const RegType& GetComponentType(const RegType& array, ClassLoader* loader)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
+  void Dump(std::ostream& os) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
  private:
   // The allocated entries
   std::vector<RegType*> entries_;