Ensure coherency of call kinds for LocationSummary.

The coherency is enforced with checks added in the `InvokeRuntime`
helper, which we now also use on x86 and x86_64.
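
For context, a minimal sketch of the invariant those checks express,
written as a hypothetical standalone predicate (not part of this
change); it assumes the existing LocationSummary predicates WillCall()
and OnlyCallsOnSlowPath(), and the SlowPathCode::IsFatal() overrides
added below:

  static bool CallKindMatchesRuntimeCall(const LocationSummary* locations,
                                         const SlowPathCode* slow_path) {
    if (slow_path == nullptr) {
      // A runtime call emitted on the main code path must have been
      // declared as a call to the register allocator.
      return locations->WillCall();
    }
    // A runtime call emitted from a slow path is acceptable if the
    // instruction was declared kCallOnSlowPath, or if the slow path never
    // returns (IsFatal()), in which case no values need to be preserved
    // across the call.
    return locations->OnlyCallsOnSlowPath() || slow_path->IsFatal();
  }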

Change-Id: I8cb92b042f25dc3c5fd390e9c61a45b477d081f4
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index a5bad65..b6d67de 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -138,6 +138,8 @@
     CheckEntrypointTypes<kQuickThrowArrayBounds, void, int32_t, int32_t>();
   }
 
+  bool IsFatal() const OVERRIDE { return true; }
+
   const char* GetDescription() const OVERRIDE { return "BoundsCheckSlowPathMIPS64"; }
 
  private:
@@ -162,6 +164,8 @@
     CheckEntrypointTypes<kQuickThrowDivZero, void, void>();
   }
 
+  bool IsFatal() const OVERRIDE { return true; }
+
   const char* GetDescription() const OVERRIDE { return "DivZeroCheckSlowPathMIPS64"; }
 
  private:
@@ -278,6 +282,8 @@
     CheckEntrypointTypes<kQuickThrowNullPointer, void, void>();
   }
 
+  bool IsFatal() const OVERRIDE { return true; }
+
   const char* GetDescription() const OVERRIDE { return "NullCheckSlowPathMIPS64"; }
 
  private:
@@ -971,6 +977,14 @@
                                         HInstruction* instruction,
                                         uint32_t dex_pc,
                                         SlowPathCode* slow_path) {
+  // Ensure that the call kind indication given to the register allocator is
+  // coherent with the runtime call generated.
+  if (slow_path == nullptr) {
+    DCHECK(instruction->GetLocations()->WillCall());
+  } else {
+    DCHECK(instruction->GetLocations()->OnlyCallsOnSlowPath() || slow_path->IsFatal());
+  }
+
   // TODO: anything related to T9/GP/GOT/PIC/.so's?
   __ LoadFromOffset(kLoadDoubleword, T9, TR, entry_point_offset);
   __ Jalr(T9);