AArch64: Add memcmp16() for ARM64; ensure xSELF is not clobbered

This patch adapts memcmp() into memcmp16(). Note that this
implementation of memcmp16() is based on bionic's memcmp().

However, to reflect a recent specification change, the implementation
has been modified to follow the new String.compareTo() behavior.
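
For reference, here is a minimal, non-optimized sketch of the intended
semantics (illustrative only: the label names are assumptions, and the
real __memcmp16 is an optimized bionic-derived routine). It compares
two arrays of UTF-16 code units and, per the new String.compareTo()
behavior, returns the signed difference of the first differing pair
rather than just its sign:

    // int32_t memcmp16(const uint16_t* s0 /* x0 */,
    //                  const uint16_t* s1 /* x1 */,
    //                  size_t count /* x2, in 16-bit units */)
    memcmp16_sketch:
        cbz     x2, .Lmc16_equal    // count == 0 -> arrays compare equal
    .Lmc16_loop:
        ldrh    w3, [x0], #2        // load next code unit from s0
        ldrh    w4, [x1], #2        // load next code unit from s1
        subs    w5, w3, w4          // signed difference of the two units
        b.ne    .Lmc16_diff         // first mismatch decides the result
        subs    x2, x2, #1          // one fewer unit left to compare
        b.ne    .Lmc16_loop
    .Lmc16_equal:
        mov     w0, #0              // all 'count' units matched
        ret
    .Lmc16_diff:
        mov     w0, w5              // return s0[i] - s1[i], not just its sign
        ret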

A test for memcmp16() has been added. The string_compareto test in
stub_test has been changed to invoke __memcmp16 through the assembly
stubs.

Add artIsAssignableFromCode to the list of native downcalls that
store and reload x18 (xSELF) around the C call. Remove
CheckSuspendFromCode, as it is unused.
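
For context, a hedged illustration of why the spill is needed (not
part of this patch): AAPCS64 designates x18 as the platform register
and does not require callees to preserve it, so C code reached through
these downcalls may clobber it, while ART keeps the current Thread* in
it as xSELF. Any stub that calls into C therefore spills and reloads
it around the call:

    // Illustrative only; 'some_c_entrypoint' is a placeholder name.
    stp    xSELF, xLR, [sp, #-16]!   // spill thread register and return address
    bl     some_c_entrypoint         // the C callee may use x18 as a scratch
    ldp    xSELF, xLR, [sp], #16     // reload; xSELF holds Thread* again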

Signed-off-by: Serban Constantinescu <serban.constantinescu@arm.com>
Change-Id: Ie0b5425ecfb62906d29a5d02e84c7e07ffb34a11
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 2e60b93..e088751 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1632,6 +1632,8 @@
     ldr   x0, [sp], 16        // Restore integer result, and drop stack area.
     .cfi_adjust_cfa_offset 16
 
+    // Need to restore xSELF (x18); the native call may have clobbered it.
+    ldr   xSELF, [sp, #72]
     POP_REF_ONLY_CALLEE_SAVE_FRAME
 
     br    x9                  // Tail-call out.
@@ -1647,6 +1649,7 @@
     mov    x0, xSELF          // Pass thread.
     mov    x1, sp             // Pass SP.
     bl     artDeoptimize      // artDeoptimize(Thread*, SP)
+    brk 0                     // Unreached: artDeoptimize never returns.
 END art_quick_deoptimize
 
 
@@ -1757,7 +1760,7 @@
      *    x1:   comp object pointer
      *
      */
-    .extern memcmp16_generic_static
+    .extern __memcmp16
 ENTRY art_quick_string_compareto
     mov    x2, x0         // x0 is return, use x2 for first input.
     sub    x0, x2, x1     // Same string object?
@@ -1850,16 +1853,17 @@
     ret
 
 .Ldo_memcmp16:
-    str x0, [sp,#-16]!           // Save x0
+    mov x14, x0                  // Save x0 and LR. __memcmp16 does not use these temps.
+    mov x15, xLR                 //                 TODO: Codify and check that?
 
     mov x0, x2
     uxtw x2, w3
-    bl memcmp16_generic_static
+    bl __memcmp16
 
-    ldr x1, [sp], #16            // Restore old x0 = length diff
+    mov xLR, x15                 // Restore LR.
 
-    cmp x0, #0                   // Check the memcmp difference
-    csel x0, x0, x1, ne          // x0 := x0 != 0 ? x0 : x1
+    cmp x0, #0                   // Check the memcmp difference.
+    csel x0, x0, x14, ne         // x0 := x0 != 0 ? x0 : x14 (prev x0 = length diff).
     ret
 END art_quick_string_compareto
 
@@ -1869,11 +1873,9 @@
 .macro NATIVE_DOWNCALL name, entrypoint
     .extern \entrypoint
 ENTRY \name
-    sub    sp, sp, #16
-    stp    xSELF, xLR, [sp]
+    stp    xSELF, xLR, [sp, #-16]!
     bl     \entrypoint
-    ldp    xSELF, xLR, [sp]
-    add    sp, sp, #16
+    ldp    xSELF, xLR, [sp], #16
     ret
 END \name
 .endm
@@ -1881,3 +1883,4 @@
 NATIVE_DOWNCALL art_quick_fmod fmod
 NATIVE_DOWNCALL art_quick_fmodf fmodf
 NATIVE_DOWNCALL art_quick_memcpy memcpy
+NATIVE_DOWNCALL art_quick_assignable_from_code artIsAssignableFromCode
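
On the TODO above ("Codify and check that?"): one possible debug-only
check of the assumption that __memcmp16 preserves x14 and x15 would be
a wrapper that traps on violation. This is only a sketch under that
assumption; the wrapper and label names are invented:

    // Debug-only sketch: verify __memcmp16 leaves x14/x15 untouched.
    check_memcmp16_temps:
        stp    x14, x15, [sp, #-32]!    // stash the expected values
        str    xLR, [sp, #16]           // the bl below clobbers LR
        bl     __memcmp16
        ldp    x9, x10, [sp]            // reload the expected x14/x15
        cmp    x9, x14
        b.ne   .Lmc16_clobbered
        cmp    x10, x15
        b.ne   .Lmc16_clobbered
        ldr    xLR, [sp, #16]
        add    sp, sp, #32
        ret
    .Lmc16_clobbered:
        brk    0                        // assumption violated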