AArch64: Add suspend check in managed code.

TODO: Remove x19 from the frame in the runtime, generic JNI, and compiled JNI.
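
Managed code keeps a countdown in w19 (wSUSPEND): the invoke stub seeds
it with SUSPEND_CHECK_INTERVAL, the compiler emits
"subs wSUSPEND, wSUSPEND, #1" at each suspend point and branches to
art_quick_test_suspend when the counter hits zero, and the stub resets
the counter, tests the thread's flags, and calls artTestSuspendFromCode
only when a suspension is actually pending. A rough C++ model of this
protocol follows; the interval value, type names, and driver are
illustrative only, not taken from the sources:

    #include <cstdint>
    #include <cstdio>

    // Assumed interval value; the real SUSPEND_CHECK_INTERVAL is
    // defined elsewhere in the runtime.
    constexpr int32_t kSuspendCheckInterval = 1000;

    struct Thread {
      uint32_t flags = 0;                         // suspend request pending?
      int32_t countdown = kSuspendCheckInterval;  // models wSUSPEND (w19)
    };

    // Stands in for artTestSuspendFromCode: park until resumed.
    void TestSuspendFromCode(Thread* self) {
      std::printf("suspend point hit, flags=%u\n", self->flags);
      self->flags = 0;
    }

    // One emitted check: subs wSUSPEND, wSUSPEND, #1 / branch on zero.
    void CheckSuspend(Thread* self) {
      if (--self->countdown == 0) {
        // art_quick_test_suspend: reset the counter, test the flags.
        self->countdown = kSuspendCheckInterval;
        if (self->flags != 0) {
          TestSuspendFromCode(self);
        }
      }
    }

    int main() {
      Thread t;
      t.flags = 1;  // pretend the runtime requested a suspension
      for (int i = 0; i < kSuspendCheckInterval; ++i) {
        CheckSuspend(&t);
      }
      return 0;
    }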

Change-Id: Ibdc292c9e7adb3a5d3eff353c22f60ffc101f549
diff --git a/compiler/dex/frontend.cc b/compiler/dex/frontend.cc
index 9bad736..c072959 100644
--- a/compiler/dex/frontend.cc
+++ b/compiler/dex/frontend.cc
@@ -692,7 +692,7 @@
 // S : short
 // C : char
 // I : int
-// L : long
+// J : long
 // F : float
 // D : double
 // L : reference(object, array)
diff --git a/compiler/dex/quick/arm64/int_arm64.cc b/compiler/dex/quick/arm64/int_arm64.cc
index 38f110e..8dad90a 100644
--- a/compiler/dex/quick/arm64/int_arm64.cc
+++ b/compiler/dex/quick/arm64/int_arm64.cc
@@ -725,17 +725,10 @@
 
 // Test suspend flag, return target of taken suspend branch
 LIR* Arm64Mir2Lir::OpTestSuspend(LIR* target) {
-  // TODO(Arm64): re-enable suspend checks, once art_quick_test_suspend is implemented and
-  //   the suspend register is properly handled in the trampolines.
-#if 0
+  // FIXME: Define rA64_SUSPEND as w19 once we no longer need two copies of the reserved register.
+  // Note: the opcode is not marked as wide, so we are actually using the 32-bit view of the register.
   NewLIR3(kA64Subs3rRd, rA64_SUSPEND, rA64_SUSPEND, 1);
   return OpCondBranch((target == NULL) ? kCondEq : kCondNe, target);
-#else
-  // TODO(Arm64): Fake suspend check. Will always fail to branch. Remove this.
-  LIR* branch = NewLIR2((target == NULL) ? kA64Cbnz2rt : kA64Cbz2rt, rwzr, 0);
-  branch->target = target;
-  return branch;
-#endif
 }
 
 // Decrement register and branch on condition
diff --git a/compiler/dex/quick/arm64/target_arm64.cc b/compiler/dex/quick/arm64/target_arm64.cc
index 808060d..0222447 100644
--- a/compiler/dex/quick/arm64/target_arm64.cc
+++ b/compiler/dex/quick/arm64/target_arm64.cc
@@ -630,12 +630,6 @@
     DCHECK_EQ(info->StorageMask(), 0x1U);
   }
 
-  // TODO: re-enable this when we can safely save r4 over the suspension code path.
-  bool no_suspend = NO_SUSPEND;  // || !Runtime::Current()->ExplicitSuspendChecks();
-  if (no_suspend) {
-    GetRegInfo(rs_rA64_SUSPEND)->MarkFree();
-  }
-
   // Don't start allocating temps at r0/s0/d0 or you may clobber return regs in early-exit methods.
   // TODO: adjust when we roll to hard float calling convention.
   reg_pool_->next_core_reg_ = 2;
diff --git a/runtime/arch/arm64/asm_support_arm64.S b/runtime/arch/arm64/asm_support_arm64.S
index 9614c29..b94375e 100644
--- a/runtime/arch/arm64/asm_support_arm64.S
+++ b/runtime/arch/arm64/asm_support_arm64.S
@@ -21,6 +21,9 @@
 
 // Define special registers.
 
+// Register holding the suspend check countdown.
+// 32 bits are enough, so we use the 32-bit view (w19) of the register.
+#define wSUSPEND w19
 // Register holding Thread::Current().
 #define xSELF x18
 // Frame Pointer
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 7f31fb6..97caa1f 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -197,7 +197,8 @@
 .endm
 
 .macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
-    brk 0
+    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
+    ret
 .endm
 
 
@@ -561,32 +562,33 @@
 SAVE_SIZE=5*8   // x4, x5, SP, LR & FP saved.
 SAVE_SIZE_AND_METHOD=SAVE_SIZE+8
 
-    mov x9, sp                          // Save stack pointer.
+    mov x9, sp                             // Save stack pointer.
     .cfi_register sp,x9
 
-    add x10, x2, # SAVE_SIZE_AND_METHOD // calculate size of frame.
-    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* +  args
-    and x10, x10, # ~0xf                // Enforce 16 byte stack alignment.
-    mov sp, x10                         // Set new SP.
+    add x10, x2, # SAVE_SIZE_AND_METHOD    // Calculate size of frame.
+    sub x10, sp, x10                       // Calculate SP position - saves + ArtMethod* + args.
+    and x10, x10, # ~0xf                   // Enforce 16 byte stack alignment.
+    mov sp, x10                            // Set new SP.
 
-    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
-    .cfi_def_cfa_register x10           // before this.
+    sub x10, x9, #SAVE_SIZE                // Calculate new FP (later). Done here as we must move SP
+    .cfi_def_cfa_register x10              // before this.
     .cfi_adjust_cfa_offset SAVE_SIZE
 
-    str x9, [x10, #32]                  // Save old stack pointer.
+    str x9, [x10, #32]                     // Save old stack pointer.
     .cfi_rel_offset sp, 32
 
-    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
+    stp x4, x5, [x10, #16]                 // Save result and shorty addresses.
     .cfi_rel_offset x4, 16
     .cfi_rel_offset x5, 24
 
-    stp xFP, xLR, [x10]                 // Store LR & FP.
+    stp xFP, xLR, [x10]                    // Store LR & FP.
     .cfi_rel_offset x29, 0
     .cfi_rel_offset x30, 8
 
-    mov xFP, x10                        // Use xFP now, as it's callee-saved.
+    mov xFP, x10                           // Use xFP now, as it's callee-saved.
     .cfi_def_cfa_register x29
-    mov xSELF, x3                       // Move thread pointer into SELF register.
+    mov xSELF, x3                          // Move thread pointer into SELF register.
+    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL  // reset wSUSPEND to suspend check interval
 
     // Copy arguments into stack frame.
     // Use simple copy routine for now.
@@ -595,7 +597,7 @@
     // W2 - args length
     // X9 - destination address.
     // W10 - temporary
-    add x9, sp, #8     // Destination address is bottom of stack + NULL.
+    add x9, sp, #8                         // Destination address is bottom of stack + NULL.
 
     // Use \@ to differentiate between macro invocations.
 .LcopyParams\@:
@@ -693,6 +695,7 @@
  *  x1-x7 - integer parameters.
  *  d0-d7 - Floating point parameters.
  *  xSELF = self
+ *  wSUSPEND = suspend check countdown
  *  SP = & of ArtMethod*
  *  x1 = "this" pointer.
  *
@@ -1373,7 +1376,25 @@
 // Generate the allocation entrypoints for each allocator.
 GENERATE_ALL_ALLOC_ENTRYPOINTS
 
-UNIMPLEMENTED art_quick_test_suspend
+    /*
+     * Called by managed code when the value in wSUSPEND has been decremented to 0.
+     */
+    .extern artTestSuspendFromCode
+ENTRY art_quick_test_suspend
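+    // Fast path: reset the countdown and return immediately when the
+    // thread's flags are clear; fall through to the runtime call only
+    // when a suspension has actually been requested.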
+    ldrh   w0, [xSELF, #THREAD_FLAGS_OFFSET]  // get xSELF->state_and_flags.as_struct.flags
+    mov    wSUSPEND, #SUSPEND_CHECK_INTERVAL  // reset wSUSPEND to SUSPEND_CHECK_INTERVAL
+    cbnz   w0, .Lneed_suspend                 // branch if flags != 0
+    ret                                       // return if flags == 0
+.Lneed_suspend:
+    mov    x0, xSELF
+    SETUP_REF_ONLY_CALLEE_SAVE_FRAME          // save callee saves for stack crawl
+    mov    x1, sp
+    bl     artTestSuspendFromCode             // (Thread*, SP)
+    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
+END art_quick_test_suspend
 
      /*
      * Called by managed code that is attempting to call a method on a proxy class. On entry