Break the debug cycle!
Rationale:
When entering the path that implements method invocation,
the interpreter and the JIT can cycle indefinitely, each
handing the method off to the other without ever deciding
how to execute it. This fix breaks that cycle with minimal
extra runtime overhead.
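
To make the failure mode concrete, below is a minimal, self-contained C++
sketch of the cycle and of how a stay_in_interpreter-style flag breaks it.
The types and function names are illustrative only and are not ART's real API.

  // Illustrative sketch only -- not ART's real code. Shows how an unconditional
  // hand-off between "interpret" and "run compiled code" can loop forever, and
  // how a stay_in_interpreter flag terminates the loop.
  #include <cstdio>

  struct Method { bool has_compiled_code; };

  void InvokeCompiled(Method* m);

  // Interpreter entry point. When stay_in_interpreter is true we never hand
  // the method back to the compiled-code path, so no cycle is possible.
  void Interpret(Method* m, bool stay_in_interpreter) {
    if (!stay_in_interpreter && m->has_compiled_code) {
      InvokeCompiled(m);  // old behavior: prefer JIT-compiled code
      return;
    }
    std::printf("interpreting method\n");
  }

  // Compiled-code entry point. A debugger-forced invocation sends the method
  // straight back to the interpreter -- with the flag set, this terminates.
  void InvokeCompiled(Method* m) {
    bool debugger_forces_interpreter = true;  // e.g. a breakpoint is active
    if (debugger_forces_interpreter) {
      Interpret(m, /*stay_in_interpreter=*/ true);  // without 'true' here, we would cycle
      return;
    }
    std::printf("running compiled code\n");
  }

  int main() {
    Method m{/*has_compiled_code=*/ true};
    Interpret(&m, /*stay_in_interpreter=*/ false);  // prints "interpreting method"
    return 0;
  }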
BUG=28537292
Change-Id: If805b7bc0279019d0ebf322ddd17cb57a9e884eb
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index 06156f5..1790df6 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -253,14 +253,17 @@
Runtime* runtime = Runtime::Current();
// Call the invoke stub, passing everything as arguments.
// If the runtime is not yet started or it is required by the debugger, then perform the
- // Invocation by the interpreter.
+ // invocation by the interpreter, explicitly forcing interpretation over JIT to prevent
+ // cycling around the various JIT/Interpreter methods that handle method invocation.
if (UNLIKELY(!runtime->IsStarted() || Dbg::IsForcedInterpreterNeededForCalling(self, this))) {
if (IsStatic()) {
- art::interpreter::EnterInterpreterFromInvoke(self, this, nullptr, args, result);
+ art::interpreter::EnterInterpreterFromInvoke(
+ self, this, nullptr, args, result, /*stay_in_interpreter*/ true);
} else {
mirror::Object* receiver =
reinterpret_cast<StackReference<mirror::Object>*>(&args[0])->AsMirrorPtr();
- art::interpreter::EnterInterpreterFromInvoke(self, this, receiver, args + 1, result);
+ art::interpreter::EnterInterpreterFromInvoke(
+ self, this, receiver, args + 1, result, /*stay_in_interpreter*/ true);
}
} else {
DCHECK_EQ(runtime->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
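
A side note on the argument layout in the hunk above: for instance methods the
packed args array carries the receiver reference in slot 0, which is why the
interpreter is handed args + 1 and only sees the explicit arguments. A tiny
self-contained illustration follows; the plain uint32_t layout is a
simplification, as ART actually reads the receiver through a
StackReference<mirror::Object>.

  #include <cstdint>
  #include <cstdio>

  int main() {
    // Simplified stand-in for the packed argument array of an instance method:
    // slot 0 holds the receiver reference, the explicit arguments follow.
    uint32_t args[] = {0xcafe0000u /* receiver ref */, 7u, 42u};
    uint32_t receiver = args[0];       // consumed by the invoke path itself
    uint32_t* callee_args = args + 1;  // what the interpreter is given
    std::printf("receiver=%#x first_arg=%u\n",
                (unsigned) receiver, (unsigned) callee_args[0]);
    return 0;
  }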
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index 6c630cc..1d0e600 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -264,12 +264,12 @@
ShadowFrame& shadow_frame, JValue result_register);
#endif
-static JValue Execute(Thread* self, const DexFile::CodeItem* code_item, ShadowFrame& shadow_frame,
- JValue result_register)
- SHARED_REQUIRES(Locks::mutator_lock_);
-
-static inline JValue Execute(Thread* self, const DexFile::CodeItem* code_item,
- ShadowFrame& shadow_frame, JValue result_register) {
+static inline JValue Execute(
+ Thread* self,
+ const DexFile::CodeItem* code_item,
+ ShadowFrame& shadow_frame,
+ JValue result_register,
+ bool stay_in_interpreter = false) SHARED_REQUIRES(Locks::mutator_lock_) {
DCHECK(!shadow_frame.GetMethod()->IsAbstract());
DCHECK(!shadow_frame.GetMethod()->IsNative());
if (LIKELY(shadow_frame.GetDexPC() == 0)) { // Entering the method, but not via deoptimization.
@@ -284,19 +284,21 @@
method, 0);
}
- jit::Jit* jit = Runtime::Current()->GetJit();
- if (jit != nullptr) {
- jit->MethodEntered(self, shadow_frame.GetMethod());
- if (jit->CanInvokeCompiledCode(method)) {
- JValue result;
+ if (!stay_in_interpreter) {
+ jit::Jit* jit = Runtime::Current()->GetJit();
+ if (jit != nullptr) {
+ jit->MethodEntered(self, shadow_frame.GetMethod());
+ if (jit->CanInvokeCompiledCode(method)) {
+ JValue result;
- // Pop the shadow frame before calling into compiled code.
- self->PopShadowFrame();
- ArtInterpreterToCompiledCodeBridge(self, nullptr, code_item, &shadow_frame, &result);
- // Push the shadow frame back as the caller will expect it.
- self->PushShadowFrame(&shadow_frame);
+ // Pop the shadow frame before calling into compiled code.
+ self->PopShadowFrame();
+ ArtInterpreterToCompiledCodeBridge(self, nullptr, code_item, &shadow_frame, &result);
+ // Push the shadow frame back as the caller will expect it.
+ self->PushShadowFrame(&shadow_frame);
- return result;
+ return result;
+ }
}
}
}
@@ -387,7 +389,8 @@
}
void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method, Object* receiver,
- uint32_t* args, JValue* result) {
+ uint32_t* args, JValue* result,
+ bool stay_in_interpreter) {
DCHECK_EQ(self, Thread::Current());
bool implicit_check = !Runtime::Current()->ExplicitStackOverflowChecks();
if (UNLIKELY(__builtin_frame_address(0) < self->GetStackEndForInterpreter(implicit_check))) {
@@ -462,7 +465,7 @@
}
}
if (LIKELY(!method->IsNative())) {
- JValue r = Execute(self, code_item, *shadow_frame, JValue());
+ JValue r = Execute(self, code_item, *shadow_frame, JValue(), stay_in_interpreter);
if (result != nullptr) {
*result = r;
}
diff --git a/runtime/interpreter/interpreter.h b/runtime/interpreter/interpreter.h
index 6353a9b..bf4bcff 100644
--- a/runtime/interpreter/interpreter.h
+++ b/runtime/interpreter/interpreter.h
@@ -33,8 +33,11 @@
namespace interpreter {
// Called by ArtMethod::Invoke, shadow frames arguments are taken from the args array.
+// The optional stay_in_interpreter parameter (false by default) can be used by clients to
+// explicitly force interpretation in the remaining path that implements method invocation.
extern void EnterInterpreterFromInvoke(Thread* self, ArtMethod* method,
- mirror::Object* receiver, uint32_t* args, JValue* result)
+ mirror::Object* receiver, uint32_t* args, JValue* result,
+ bool stay_in_interpreter = false)
SHARED_REQUIRES(Locks::mutator_lock_);
// 'from_code' denotes whether the deoptimization was explicitly triggered by compiled code.
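
On the interface change above: because the new parameter defaults to false,
every existing caller of EnterInterpreterFromInvoke keeps compiling and
behaving as before; only the debugger-forced path in ArtMethod::Invoke opts in.
A minimal sketch of that pattern (illustrative signature, not the real
declaration):

  #include <cstdio>

  // Mirrors the shape of the change: a defaulted flag keeps existing call
  // sites source-compatible while new callers can explicitly force
  // interpretation.
  static void EnterFromInvoke(const char* method, bool stay_in_interpreter = false) {
    std::printf("%s: %s\n", method,
                stay_in_interpreter ? "interpreter only" : "may use compiled code");
  }

  int main() {
    EnterFromInvoke("existingCaller");                                     // unchanged call site
    EnterFromInvoke("debuggerForcedCall", /*stay_in_interpreter=*/ true);  // new opt-in path
    return 0;
  }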