Revert "Revert "Fix deoptimization with pending exception""
This reverts commit 6e2d5747d00697a25251d25dd33b953e54709507.
Fixes the deoptimization path from compiled code (generated by the
Optimizing compiler) by adding a wrapper, artDeoptimizeFromCompiledCode.
This wrapper, called through the matching assembler stub
art_quick_deoptimize_from_compiled_code, pushes the deoptimization
context just before deoptimizing the stack.
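For context, a rough sketch of the wrapper's shape (not a verbatim copy of
the new runtime code; it assumes Thread::PushDeoptimizationContext and the
existing artDeoptimize(Thread*) entrypoint have roughly the shapes used
elsewhere in this revision):

    // Sketch only: the exact names/signatures of PushDeoptimizationContext
    // and artDeoptimize are assumptions, not quoted from this change.
    extern "C" NO_RETURN void artDeoptimizeFromCompiledCode(Thread* self)
        SHARED_REQUIRES(Locks::mutator_lock_) {
      ScopedQuickEntrypointChecks sqec(self);
      // Push the deoptimization context first so that a pending exception
      // survives the transition into the interpreter.
      JValue return_value;
      return_value.SetJ(0);  // no invoke result when deoptimizing from compiled code
      self->PushDeoptimizationContext(return_value,
                                      /* is_reference */ false,
                                      self->GetException());
      // Long jump to the upcall with the special deoptimization exception.
      artDeoptimize(self);
    }

Each architecture gets a matching assembler stub that sets up the
save-all-callee-save frame, passes Thread::Current() and calls the wrapper.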
Bug: 23371176
Bug: 19944235
Change-Id: Ia7082656998aebdd0157438f7e6504c120e10d3e
diff --git a/runtime/arch/arm/entrypoints_init_arm.cc b/runtime/arch/arm/entrypoints_init_arm.cc
index be9af98..1599025 100644
--- a/runtime/arch/arm/entrypoints_init_arm.cc
+++ b/runtime/arch/arm/entrypoints_init_arm.cc
@@ -167,7 +167,8 @@
qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
- qpoints->pDeoptimize = art_quick_deoptimize;
+ // Deoptimization from compiled code.
+ qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_code;
// Read barrier
qpoints->pReadBarrierJni = ReadBarrierJni;
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index f6d954f..b9f8cb1 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1141,6 +1141,17 @@
END art_quick_deoptimize
/*
+ * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+ * will long jump to the upcall with a special exception of -1.
+ */
+ .extern artDeoptimizeFromCompiledCode
+ENTRY art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_ALL_CALLEE_SAVE_FRAME r0, r1
+ mov r0, r9 @ Pass Thread::Current() (r9 holds the thread pointer).
+ blx artDeoptimizeFromCompiledCode @ artDeoptimizeFromCompiledCode(Thread*)
+END art_quick_deoptimize_from_compiled_code
+
+ /*
* Signed 64-bit integer multiply.
*
* Consider WXxYZ (r1r0 x r3r2) with a long multiply:
diff --git a/runtime/arch/arm64/entrypoints_init_arm64.cc b/runtime/arch/arm64/entrypoints_init_arm64.cc
index 0f06727..e9c816f 100644
--- a/runtime/arch/arm64/entrypoints_init_arm64.cc
+++ b/runtime/arch/arm64/entrypoints_init_arm64.cc
@@ -150,8 +150,8 @@
qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
- // Deoptimize
- qpoints->pDeoptimize = art_quick_deoptimize;
+ // Deoptimization from compiled code.
+ qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_code;
// Read barrier
qpoints->pReadBarrierJni = ReadBarrierJni;
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 8ba3d43..07b91a1 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1739,6 +1739,18 @@
brk 0
END art_quick_deoptimize
+ /*
+ * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+ * will long jump to the upcall with a special exception of -1.
+ */
+ .extern artDeoptimizeFromCompiledCode
+ENTRY art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
+ mov x0, xSELF // Pass thread.
+ bl artDeoptimizeFromCompiledCode // artDeoptimizeFromCompiledCode(Thread*)
+ brk 0
+END art_quick_deoptimize_from_compiled_code
+
/*
* String's indexOf.
diff --git a/runtime/arch/mips/entrypoints_init_mips.cc b/runtime/arch/mips/entrypoints_init_mips.cc
index 4e4b91f..6721e54 100644
--- a/runtime/arch/mips/entrypoints_init_mips.cc
+++ b/runtime/arch/mips/entrypoints_init_mips.cc
@@ -267,8 +267,8 @@
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
static_assert(!IsDirectEntrypoint(kQuickThrowStackOverflow), "Non-direct C stub marked direct.");
- // Deoptimize
- qpoints->pDeoptimize = art_quick_deoptimize;
+ // Deoptimization from compiled code.
+ qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_code;
static_assert(!IsDirectEntrypoint(kQuickDeoptimize), "Non-direct C stub marked direct.");
// Atomic 64-bit load/store
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index 8bc75e5..0147230 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -1542,6 +1542,18 @@
END art_quick_deoptimize
/*
+ * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+ * will long jump to the upcall with a special exception of -1.
+ */
+ .extern artDeoptimizeFromCompiledCode
+ENTRY art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
+ jal artDeoptimizeFromCompiledCode # artDeoptimizeFromCompiledCode(Thread*)
+ # (does not return; the runtime long jumps)
+ move $a0, rSELF # pass Thread::current in the branch delay slot
+END art_quick_deoptimize_from_compiled_code
+
+ /*
* Long integer shift. This is different from the generic 32/64-bit
* binary operations because vAA/vBB are 64-bit but vCC (the shift
* distance) is 32-bit. Also, Dalvik requires us to ignore all but the low
diff --git a/runtime/arch/mips64/entrypoints_init_mips64.cc b/runtime/arch/mips64/entrypoints_init_mips64.cc
index ec02d5a..9f1f0e0 100644
--- a/runtime/arch/mips64/entrypoints_init_mips64.cc
+++ b/runtime/arch/mips64/entrypoints_init_mips64.cc
@@ -176,8 +176,8 @@
qpoints->pThrowNullPointer = art_quick_throw_null_pointer_exception;
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
- // Deoptimize
- qpoints->pDeoptimize = art_quick_deoptimize;
+ // Deoptimization from compiled code.
+ qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_code;
// TODO - use lld/scd instructions for Mips64
// Atomic 64-bit load/store
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index c30e6ca..08717a4 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1603,5 +1603,17 @@
move $a0, rSELF # pass Thread::current
END art_quick_deoptimize
+ /*
+ * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+ * will long jump to the upcall with a special exception of -1.
+ */
+ .extern artDeoptimizeFromCompiledCode
+ENTRY art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
+ jal artDeoptimizeFromCompiledCode # artDeoptimizeFromCompiledCode(Thread*)
+ # (does not return; the runtime long jumps)
+ move $a0, rSELF # pass Thread::current in the branch delay slot
+END art_quick_deoptimize_from_compiled_code
+
UNIMPLEMENTED art_quick_indexof
UNIMPLEMENTED art_quick_string_compareto
diff --git a/runtime/arch/x86/entrypoints_init_x86.cc b/runtime/arch/x86/entrypoints_init_x86.cc
index e2632c1..10fc281 100644
--- a/runtime/arch/x86/entrypoints_init_x86.cc
+++ b/runtime/arch/x86/entrypoints_init_x86.cc
@@ -140,7 +140,7 @@
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
// Deoptimize
- qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_slow_path;
+ qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_code;
// Read barrier
qpoints->pReadBarrierJni = ReadBarrierJni;
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 1da5a2f..7fbcf8d 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1677,9 +1677,6 @@
*/
DEFINE_FUNCTION art_quick_deoptimize
PUSH ebx // Entry point for a jump. Fake that we were called.
-.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path) // Entry point for real calls
- // from compiled slow paths.
-SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
subl LITERAL(12), %esp // Align stack.
CFI_ADJUST_CFA_OFFSET(12)
@@ -1690,6 +1687,20 @@
END_FUNCTION art_quick_deoptimize
/*
+ * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+ * will long jump to the upcall with a special exception of -1.
+ */
+DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_ALL_CALLEE_SAVE_FRAME ebx, ebx
+ subl LITERAL(12), %esp // Align stack.
+ CFI_ADJUST_CFA_OFFSET(12)
+ pushl %fs:THREAD_SELF_OFFSET // Pass Thread::Current().
+ CFI_ADJUST_CFA_OFFSET(4)
+ call SYMBOL(artDeoptimizeFromCompiledCode) // artDeoptimizeFromCompiledCode(Thread*)
+ UNREACHABLE
+END_FUNCTION art_quick_deoptimize_from_compiled_code
+
+ /*
* String's compareTo.
*
* On entry:
diff --git a/runtime/arch/x86_64/entrypoints_init_x86_64.cc b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
index ef1bb5f..5cc72e3 100644
--- a/runtime/arch/x86_64/entrypoints_init_x86_64.cc
+++ b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
@@ -144,7 +144,7 @@
qpoints->pThrowStackOverflow = art_quick_throw_stack_overflow;
// Deoptimize
- qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_slow_path;
+ qpoints->pDeoptimize = art_quick_deoptimize_from_compiled_code;
// Read barrier
qpoints->pReadBarrierJni = ReadBarrierJni;
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index f4c9488..5f3f175 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1721,9 +1721,6 @@
DEFINE_FUNCTION art_quick_deoptimize
pushq %rsi // Entry point for a jump. Fake that we were called.
// Use hidden arg.
-.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path) // Entry point for real calls
- // from compiled slow paths.
-SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
// Stack should be aligned now.
movq %gs:THREAD_SELF_OFFSET, %rdi // Pass Thread.
@@ -1732,6 +1729,18 @@
END_FUNCTION art_quick_deoptimize
/*
+ * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
+ * will long jump to the upcall with a special exception of -1.
+ */
+DEFINE_FUNCTION art_quick_deoptimize_from_compiled_code
+ SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
+ // Stack should be aligned now.
+ movq %gs:THREAD_SELF_OFFSET, %rdi // Pass Thread.
+ call SYMBOL(artDeoptimizeFromCompiledCode) // artDeoptimizeFromCompiledCode(Thread*)
+ UNREACHABLE
+END_FUNCTION art_quick_deoptimize_from_compiled_code
+
+ /*
* String's compareTo.
*
* On entry: