Revert "Optimize code generation of check-cast and instance-of."

This change caused failures in libcore tests.

This reverts commit 64acf303eaa2f32c0b1d8cfcbf044a822c5eec08.

Change-Id: Ie6f323fcf5d86bae5c334c1352bb21f1bad60a88
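
For context: the reverted change specialized instance-of and check-cast code
generation by TypeCheckKind (exact, abstract-class, hierarchy, array, and
interface checks), while this revert restores the earlier dispatch that only
distinguishes final classes from everything else. Below is a minimal,
self-contained C++ sketch of the restored instance-of shape. All names in it
are illustrative stand-ins, not ART APIs; the real code emits x86-64 assembly,
and SlowPathInstanceOf stands in for the pInstanceofNonTrivial runtime call
made from TypeCheckSlowPathX86_64.

  struct Class { const Class* super; bool is_final; };
  struct Object { const Class* klass; };

  // Stand-in for the runtime slow path; the real slow path calls into the
  // runtime rather than walking the superclass chain inline.
  static bool SlowPathInstanceOf(const Class* klass, const Class* cls) {
    for (; klass != nullptr; klass = klass->super) {
      if (klass == cls) return true;
    }
    return false;
  }

  // Shape of the code this revert restores: one class load and compare,
  // then either a constant answer (final classes) or the slow path.
  bool InstanceOfRestored(const Object* obj, const Class* cls) {
    if (obj == nullptr) return false;    // instanceof is false for null
    if (obj->klass == cls) return true;  // fast path: exact class match
    if (cls->is_final) return false;     // a final class has no subclasses
    return SlowPathInstanceOf(obj->klass, cls);
  }
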
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index c852758..ef20b71 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -300,8 +300,8 @@
 
 class TypeCheckSlowPathX86_64 : public SlowPathCodeX86_64 {
  public:
-  TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal)
-      : instruction_(instruction), is_fatal_(is_fatal) {}
+  explicit TypeCheckSlowPathX86_64(HInstruction* instruction)
+      : instruction_(instruction) {}
 
   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
     LocationSummary* locations = instruction_->GetLocations();
@@ -313,19 +313,7 @@
 
     CodeGeneratorX86_64* x64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
     __ Bind(GetEntryLabel());
-
-    if (instruction_->IsCheckCast()) {
-      // The codegen for the instruction overwrites `temp`, so put it back in place.
-      CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
-      CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
-      uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
-      __ movl(temp, Address(obj, class_offset));
-      __ MaybeUnpoisonHeapReference(temp);
-    }
-
-    if (!is_fatal_) {
-      SaveLiveRegisters(codegen, locations);
-    }
+    SaveLiveRegisters(codegen, locations);
 
     // We're moving two locations to locations that could overlap, so we need a parallel
     // move resolver.
@@ -351,23 +339,18 @@
                                  this);
     }
 
-    if (!is_fatal_) {
-      if (instruction_->IsInstanceOf()) {
-        x64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
-      }
-
-      RestoreLiveRegisters(codegen, locations);
-      __ jmp(GetExitLabel());
+    if (instruction_->IsInstanceOf()) {
+      x64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
     }
+
+    RestoreLiveRegisters(codegen, locations);
+    __ jmp(GetExitLabel());
   }
 
   const char* GetDescription() const OVERRIDE { return "TypeCheckSlowPathX86_64"; }
 
-  bool IsFatal() const OVERRIDE { return is_fatal_; }
-
  private:
   HInstruction* const instruction_;
-  const bool is_fatal_;
 
   DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86_64);
 };
@@ -4797,33 +4780,14 @@
 }
 
 void LocationsBuilderX86_64::VisitInstanceOf(HInstanceOf* instruction) {
-  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
-  switch (instruction->GetTypeCheckKind()) {
-    case TypeCheckKind::kExactCheck:
-    case TypeCheckKind::kAbstractClassCheck:
-    case TypeCheckKind::kClassHierarchyCheck:
-    case TypeCheckKind::kArrayObjectCheck:
-      call_kind = LocationSummary::kNoCall;
-      break;
-    case TypeCheckKind::kInterfaceCheck:
-      call_kind = LocationSummary::kCall;
-      break;
-    case TypeCheckKind::kArrayCheck:
-      call_kind = LocationSummary::kCallOnSlowPath;
-      break;
-  }
+  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
+      ? LocationSummary::kNoCall
+      : LocationSummary::kCallOnSlowPath;
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
-  if (call_kind != LocationSummary::kCall) {
-    locations->SetInAt(0, Location::RequiresRegister());
-    locations->SetInAt(1, Location::Any());
-    // Note that TypeCheckSlowPathX86_64 uses this register too.
-    locations->SetOut(Location::RequiresRegister());
-  } else {
-    InvokeRuntimeCallingConvention calling_convention;
-    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
-    locations->SetOut(Location::RegisterLocation(RAX));
-  }
+  locations->SetInAt(0, Location::RequiresRegister());
+  locations->SetInAt(1, Location::Any());
+  // Note that TypeCheckSlowPathX86_64 uses this register too.
+  locations->SetOut(Location::RequiresRegister());
 }
 
 void InstructionCodeGeneratorX86_64::VisitInstanceOf(HInstanceOf* instruction) {
@@ -4832,11 +4796,8 @@
   Location cls = locations->InAt(1);
   CpuRegister out = locations->Out().AsRegister<CpuRegister>();
   uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
-  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
-  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
-  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
-  SlowPathCodeX86_64* slow_path = nullptr;
   NearLabel done, zero;
+  SlowPathCodeX86_64* slow_path = nullptr;
 
   // Return 0 if `obj` is null.
   // Avoid null check if we know obj is not null.
@@ -4844,282 +4805,78 @@
     __ testl(obj, obj);
     __ j(kEqual, &zero);
   }
-
-  // In case of an interface check, we put the object class into the object register.
-  // This is safe, as the register is caller-save, and the object must be in another
-  // register if it survives the runtime call.
-  CpuRegister target = (instruction->GetTypeCheckKind() == TypeCheckKind::kInterfaceCheck)
-      ? obj
-      : out;
-  __ movl(target, Address(obj, class_offset));
-  __ MaybeUnpoisonHeapReference(target);
-
-  switch (instruction->GetTypeCheckKind()) {
-    case TypeCheckKind::kExactCheck: {
-      if (cls.IsRegister()) {
-        __ cmpl(out, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      // Classes must be equal for the instanceof to succeed.
-      __ j(kNotEqual, &zero);
-      __ movl(out, Immediate(1));
-      __ jmp(&done);
-      break;
-    }
-    case TypeCheckKind::kAbstractClassCheck: {
-      // If the class is abstract, we eagerly fetch the super class of the
-      // object to avoid doing a comparison we know will fail.
-      NearLabel loop, success;
-      __ Bind(&loop);
-      __ movl(out, Address(out, super_offset));
-      __ MaybeUnpoisonHeapReference(out);
-      __ testl(out, out);
-      // If `out` is null, we use it for the result, and jump to `done`.
-      __ j(kEqual, &done);
-      if (cls.IsRegister()) {
-        __ cmpl(out, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      __ j(kNotEqual, &loop);
-      __ movl(out, Immediate(1));
-      if (zero.IsLinked()) {
-        __ jmp(&done);
-      }
-      break;
-    }
-    case TypeCheckKind::kClassHierarchyCheck: {
-      // Walk over the class hierarchy to find a match.
-      NearLabel loop, success;
-      __ Bind(&loop);
-      if (cls.IsRegister()) {
-        __ cmpl(out, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      __ j(kEqual, &success);
-      __ movl(out, Address(out, super_offset));
-      __ MaybeUnpoisonHeapReference(out);
-      __ testl(out, out);
-      __ j(kNotEqual, &loop);
-      // If `out` is null, we use it for the result, and jump to `done`.
-      __ jmp(&done);
-      __ Bind(&success);
-      __ movl(out, Immediate(1));
-      if (zero.IsLinked()) {
-        __ jmp(&done);
-      }
-      break;
-    }
-    case TypeCheckKind::kArrayObjectCheck: {
-      // Just need to check that the object's class is a non primitive array.
-      __ movl(out, Address(out, component_offset));
-      __ MaybeUnpoisonHeapReference(out);
-      __ testl(out, out);
-      // If `out` is null, we use it for the result, and jump to `done`.
-      __ j(kEqual, &done);
-      __ cmpw(Address(out, primitive_offset), Immediate(Primitive::kPrimNot));
-      __ j(kNotEqual, &zero);
-      __ movl(out, Immediate(1));
-      __ jmp(&done);
-      break;
-    }
-    case TypeCheckKind::kArrayCheck: {
-      if (cls.IsRegister()) {
-        __ cmpl(out, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      DCHECK(locations->OnlyCallsOnSlowPath());
-      slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(
-          instruction, /* is_fatal */ false);
-      codegen_->AddSlowPath(slow_path);
-      __ j(kNotEqual, slow_path->GetEntryLabel());
-      __ movl(out, Immediate(1));
-      if (zero.IsLinked()) {
-        __ jmp(&done);
-      }
-      break;
-    }
-
-    case TypeCheckKind::kInterfaceCheck:
-    default: {
-      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial),
-                              instruction,
-                              instruction->GetDexPc(),
-                              nullptr);
-      if (zero.IsLinked()) {
-        __ jmp(&done);
-      }
-      break;
-    }
+  // Compare the class of `obj` with `cls`.
+  __ movl(out, Address(obj, class_offset));
+  __ MaybeUnpoisonHeapReference(out);
+  if (cls.IsRegister()) {
+    __ cmpl(out, cls.AsRegister<CpuRegister>());
+  } else {
+    DCHECK(cls.IsStackSlot()) << cls;
+    __ cmpl(out, Address(CpuRegister(RSP), cls.GetStackIndex()));
+  }
+  if (instruction->IsClassFinal()) {
+    // Classes must be equal for the instanceof to succeed.
+    __ j(kNotEqual, &zero);
+    __ movl(out, Immediate(1));
+    __ jmp(&done);
+  } else {
+    // If the classes are not equal, we go into a slow path.
+    DCHECK(locations->OnlyCallsOnSlowPath());
+    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction);
+    codegen_->AddSlowPath(slow_path);
+    __ j(kNotEqual, slow_path->GetEntryLabel());
+    __ movl(out, Immediate(1));
+    __ jmp(&done);
   }
 
-  if (zero.IsLinked()) {
+  if (instruction->MustDoNullCheck() || instruction->IsClassFinal()) {
     __ Bind(&zero);
-    __ xorl(out, out);
-  }
-
-  if (done.IsLinked()) {
-    __ Bind(&done);
+    __ movl(out, Immediate(0));
   }
 
   if (slow_path != nullptr) {
     __ Bind(slow_path->GetExitLabel());
   }
+  __ Bind(&done);
 }
 
 void LocationsBuilderX86_64::VisitCheckCast(HCheckCast* instruction) {
-  LocationSummary::CallKind call_kind = LocationSummary::kNoCall;
-  bool throws_into_catch = instruction->CanThrowIntoCatchBlock();
-
-  switch (instruction->GetTypeCheckKind()) {
-    case TypeCheckKind::kExactCheck:
-    case TypeCheckKind::kAbstractClassCheck:
-    case TypeCheckKind::kClassHierarchyCheck:
-    case TypeCheckKind::kArrayObjectCheck:
-      call_kind = throws_into_catch
-          ? LocationSummary::kCallOnSlowPath
-          : LocationSummary::kNoCall;
-      break;
-    case TypeCheckKind::kInterfaceCheck:
-      call_kind = LocationSummary::kCall;
-      break;
-    case TypeCheckKind::kArrayCheck:
-      call_kind = LocationSummary::kCallOnSlowPath;
-      break;
-  }
-
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
-      instruction, call_kind);
-  if (call_kind != LocationSummary::kCall) {
-    locations->SetInAt(0, Location::RequiresRegister());
-    locations->SetInAt(1, Location::Any());
-    // Note that TypeCheckSlowPathX86_64 uses this register too.
-    locations->AddTemp(Location::RequiresRegister());
-  } else {
-    InvokeRuntimeCallingConvention calling_convention;
-    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
-    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
-  }
+      instruction, LocationSummary::kCallOnSlowPath);
+  locations->SetInAt(0, Location::RequiresRegister());
+  locations->SetInAt(1, Location::Any());
+  // Note that TypeCheckSlowPathX86_64 uses this register too.
+  locations->AddTemp(Location::RequiresRegister());
 }
 
 void InstructionCodeGeneratorX86_64::VisitCheckCast(HCheckCast* instruction) {
   LocationSummary* locations = instruction->GetLocations();
   CpuRegister obj = locations->InAt(0).AsRegister<CpuRegister>();
   Location cls = locations->InAt(1);
-  CpuRegister temp = locations->WillCall()
-      ? CpuRegister(kNoRegister)
-      : locations->GetTemp(0).AsRegister<CpuRegister>();
-
+  CpuRegister temp = locations->GetTemp(0).AsRegister<CpuRegister>();
   uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
-  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
-  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
-  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
-  SlowPathCodeX86_64* slow_path = nullptr;
+  SlowPathCodeX86_64* slow_path =
+      new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(instruction);
+  codegen_->AddSlowPath(slow_path);
 
-  if (!locations->WillCall()) {
-    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86_64(
-        instruction, !locations->CanCall());
-    codegen_->AddSlowPath(slow_path);
-  }
-
-  NearLabel done;
   // Avoid null check if we know obj is not null.
   if (instruction->MustDoNullCheck()) {
     __ testl(obj, obj);
-    __ j(kEqual, &done);
+    __ j(kEqual, slow_path->GetExitLabel());
   }
-
-  if (locations->WillCall()) {
-    __ movl(obj, Address(obj, class_offset));
-    __ MaybeUnpoisonHeapReference(obj);
+  // Compare the class of `obj` with `cls`.
+  __ movl(temp, Address(obj, class_offset));
+  __ MaybeUnpoisonHeapReference(temp);
+  if (cls.IsRegister()) {
+    __ cmpl(temp, cls.AsRegister<CpuRegister>());
   } else {
-    __ movl(temp, Address(obj, class_offset));
-    __ MaybeUnpoisonHeapReference(temp);
+    DCHECK(cls.IsStackSlot()) << cls;
+    __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
   }
-
-  switch (instruction->GetTypeCheckKind()) {
-    case TypeCheckKind::kExactCheck:
-    case TypeCheckKind::kArrayCheck: {
-      if (cls.IsRegister()) {
-        __ cmpl(temp, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      // Jump to slow path for throwing the exception or doing a
-      // more involved array check.
-      __ j(kNotEqual, slow_path->GetEntryLabel());
-      break;
-    }
-    case TypeCheckKind::kAbstractClassCheck: {
-      // If the class is abstract, we eagerly fetch the super class of the
-      // object to avoid doing a comparison we know will fail.
-      NearLabel loop;
-      __ Bind(&loop);
-      __ movl(temp, Address(temp, super_offset));
-      __ MaybeUnpoisonHeapReference(temp);
-      __ testl(temp, temp);
-      // Jump to the slow path to throw the exception.
-      __ j(kEqual, slow_path->GetEntryLabel());
-      if (cls.IsRegister()) {
-        __ cmpl(temp, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      __ j(kNotEqual, &loop);
-      break;
-    }
-    case TypeCheckKind::kClassHierarchyCheck: {
-      // Walk over the class hierarchy to find a match.
-      NearLabel loop, success;
-      __ Bind(&loop);
-      if (cls.IsRegister()) {
-        __ cmpl(temp, cls.AsRegister<CpuRegister>());
-      } else {
-        DCHECK(cls.IsStackSlot()) << cls;
-        __ cmpl(temp, Address(CpuRegister(RSP), cls.GetStackIndex()));
-      }
-      __ j(kEqual, &success);
-      __ movl(temp, Address(temp, super_offset));
-      __ MaybeUnpoisonHeapReference(temp);
-      __ testl(temp, temp);
-      __ j(kNotEqual, &loop);
-      // Jump to the slow path to throw the exception.
-      __ jmp(slow_path->GetEntryLabel());
-      __ Bind(&success);
-      break;
-    }
-    case TypeCheckKind::kArrayObjectCheck: {
-      // Just need to check that the object's class is a non primitive array.
-      __ movl(temp, Address(temp, component_offset));
-      __ MaybeUnpoisonHeapReference(temp);
-      __ testl(temp, temp);
-      __ j(kEqual, slow_path->GetEntryLabel());
-      __ cmpw(Address(temp, primitive_offset), Immediate(Primitive::kPrimNot));
-      __ j(kNotEqual, slow_path->GetEntryLabel());
-      break;
-    }
-    case TypeCheckKind::kInterfaceCheck:
-    default:
-      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast),
-                              instruction,
-                              instruction->GetDexPc(),
-                              nullptr);
-      break;
-  }
-  __ Bind(&done);
-
-  if (slow_path != nullptr) {
-    __ Bind(slow_path->GetExitLabel());
-  }
+  // The checkcast succeeds if the classes are equal (fast path).
+  // Otherwise, we need to go into the slow path to check the types.
+  __ j(kNotEqual, slow_path->GetEntryLabel());
+  __ Bind(slow_path->GetExitLabel());
 }
 
 void LocationsBuilderX86_64::VisitMonitorOperation(HMonitorOperation* instruction) {
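
For completeness, the restored check-cast shape is the same single compare,
with the slow path handling both the deeper type check and the throw. Note
that the revert also drops the is_fatal_ slow-path variant, so live registers
are always saved and the slow path always returns through its exit label,
which is why the null check in VisitCheckCast can jump straight to
GetExitLabel(). A sketch reusing the illustrative Class/Object/
SlowPathInstanceOf declarations from the sketch above; the std::runtime_error
throw is a stand-in for how ART actually raises ClassCastException via the
runtime.

  #include <stdexcept>

  void CheckCastRestored(const Object* obj, const Class* cls) {
    if (obj == nullptr) return;                  // check-cast accepts null
    if (obj->klass == cls) return;               // fast path: exact class match
    if (!SlowPathInstanceOf(obj->klass, cls)) {  // slow path: full type check
      throw std::runtime_error("ClassCastException");
    }
  }
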