ART: arm64 explicit stack overflow checks

Implement only the explicit stack overflow checks in the arm64 quick backend.
Implicit checks require fault handlers, which are currently unimplemented.
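
Conceptually, an explicit check in the method prologue amounts to the
following (an illustrative sketch only, not code from this patch; the
names are hypothetical):

    // Compare the lowest address the new frame will occupy against the
    // thread's stack_end limit and throw if the frame would underflow it.
    uintptr_t frame_bottom = sp - frame_size;
    if (frame_bottom < self->stack_end) {
      ThrowStackOverflowError(self);  // hypothetical throw path
    }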

CMN and CMP have extended-register versions implemented for comparisons
against the stack pointer; in the A64 shifted-register encodings, register
number 31 selects XZR rather than SP, so SP operands require the
extended-register form. More extended-register opcode implementations will
need to follow.
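
As a usage sketch (the scratch register rs_x9 is an assumption, not taken
from this patch):

    // OpRegReg() detects the SP operand and routes it to the new
    // extended-register path in OpRegRegExtend().
    OpRegReg(kOpCmp, rs_rA64_SP, rs_x9);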

Change-Id: I8db297aec73df818b20fe410297800c886701c76
diff --git a/compiler/dex/quick/arm64/utility_arm64.cc b/compiler/dex/quick/arm64/utility_arm64.cc
index 4f0d7bc..11c96d1 100644
--- a/compiler/dex/quick/arm64/utility_arm64.cc
+++ b/compiler/dex/quick/arm64/utility_arm64.cc
@@ -426,8 +426,44 @@
   return NULL;
 }
 
+LIR* Arm64Mir2Lir::OpRegRegExtend(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int extend) {
+  ArmOpcode wide = (r_dest_src1.Is64Bit()) ? WIDE(0) : UNWIDE(0);
+  ArmOpcode opcode = kA64Brk1d;
+
+  switch (op) {
+    case kOpCmn:
+      opcode = kA64Cmn3Rre;
+      break;
+    case kOpCmp:
+      opcode = kA64Cmp3Rre;
+      break;
+    default:
+      LOG(FATAL) << "Bad Opcode: " << op;
+      break;
+  }
+
+  DCHECK(!IsPseudoLirOp(opcode));
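+  // This form applies only when the opcode's third operand is an extend field.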
+  if (EncodingMap[opcode].flags & IS_TERTIARY_OP) {
+    ArmEncodingKind kind = EncodingMap[opcode].field_loc[2].kind;
+    if (kind == kFmtExtend) {
+      return NewLIR3(opcode | wide, r_dest_src1.GetReg(), r_src2.GetReg(), extend);
+    }
+  }
+
+  LOG(FATAL) << "Unexpected encoding operand count";
+  return NULL;
+}
+
 LIR* Arm64Mir2Lir::OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2) {
-  return OpRegRegShift(op, r_dest_src1, r_src2, ENCODE_NO_SHIFT);
+  /* RegReg operations with SP as the first operand require the extended-register
+   * instruction form. Only CMN and CMP are implemented so far.
+   */
+  if (r_dest_src1 == rs_rA64_SP) {
+    return OpRegRegExtend(op, r_dest_src1, r_src2, ENCODE_NO_EXTEND);
+  } else {
+    return OpRegRegShift(op, r_dest_src1, r_src2, ENCODE_NO_SHIFT);
+  }
 }
 
 LIR* Arm64Mir2Lir::OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type) {