Fix moving GC bugs in entrypoints
In FindMethodFromCode, handles were missing for objects used
across a suspend point (ResolveType).
For check_jni, JniMethodEndWithReferenceHandleResult did not
wrap the return value in a handle when calling CheckReferenceResult.
For CheckReferenceResult, the argument was not in a handle.
Bug: 31458474
Change-Id: I06617fc63f1fb63d7c13ad4245223a8c54fa4b98
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index 1bf5c53..14110c2 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -120,8 +120,7 @@
return inlined_method;
}
-inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type) {
return GetCalleeSaveMethodCaller(
self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */);
}
@@ -130,7 +129,8 @@
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
ArtMethod* method,
- Thread* self, bool* slow_path) {
+ Thread* self,
+ bool* slow_path) {
ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
PointerSize pointer_size = class_linker->GetImagePointerSize();
mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
@@ -363,7 +363,7 @@
inline ArtField* FindFieldFromCode(uint32_t field_idx,
ArtMethod* referrer,
Thread* self,
- size_t expected_size) REQUIRES(!Roles::uninterruptible_) {
+ size_t expected_size) {
bool is_primitive;
bool is_set;
bool is_static;
@@ -444,8 +444,7 @@
return resolved_field;
} else {
StackHandleScope<1> hs(self);
- Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
- if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
+ if (LIKELY(class_linker->EnsureInitialized(self, hs.NewHandle(fields_class), true, true))) {
// Otherwise let's ensure the class is initialized before resolving the field.
return resolved_field;
}
@@ -479,8 +478,10 @@
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL
template<InvokeType type, bool access_check>
-inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object,
- ArtMethod* referrer, Thread* self) {
+inline ArtMethod* FindMethodFromCode(uint32_t method_idx,
+ mirror::Object** this_object,
+ ArtMethod* referrer,
+ Thread* self) {
ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, referrer);
if (resolved_method == nullptr) {
@@ -554,8 +555,11 @@
// that will actually not be what we want in some cases where there are miranda methods or
// defaults. What we actually need is a GetContainingClass that says which classes virtuals
// this method is coming from.
- mirror::Class* referring_class = referrer->GetDeclaringClass();
- uint16_t method_type_idx = referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
+ StackHandleScope<2> hs2(self);
+ HandleWrapper<mirror::Object> h_this(hs2.NewHandleWrapper(this_object));
+ Handle<mirror::Class> h_referring_class(hs2.NewHandle(referrer->GetDeclaringClass()));
+ const uint16_t method_type_idx =
+ h_referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
mirror::Class* method_reference_class = class_linker->ResolveType(method_type_idx, referrer);
if (UNLIKELY(method_reference_class == nullptr)) {
// Bad type idx.
@@ -566,8 +570,8 @@
// referenced class in the bytecode, we use its super class. Otherwise, we throw
// a NoSuchMethodError.
mirror::Class* super_class = nullptr;
- if (method_reference_class->IsAssignableFrom(referring_class)) {
- super_class = referring_class->GetSuperClass();
+ if (method_reference_class->IsAssignableFrom(h_referring_class.Get())) {
+ super_class = h_referring_class->GetSuperClass();
}
uint16_t vtable_index = resolved_method->GetMethodIndex();
if (access_check) {
@@ -587,10 +591,10 @@
} else {
// It is an interface.
if (access_check) {
- if (!method_reference_class->IsAssignableFrom((*this_object)->GetClass())) {
+ if (!method_reference_class->IsAssignableFrom(h_this->GetClass())) {
ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
method_reference_class,
- *this_object,
+ h_this.Get(),
referrer);
return nullptr; // Failure.
}
@@ -605,6 +609,7 @@
}
return result;
}
+ UNREACHABLE();
}
case kInterface: {
uint32_t imt_index = resolved_method->GetImtIndex();
@@ -661,6 +666,7 @@
// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
size_t expected_size) {
+ ScopedAssertNoThreadSuspension ants(__FUNCTION__);
ArtField* resolved_field =
referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx,
kRuntimePointerSize);
@@ -713,6 +719,7 @@
// Fast path method resolution that can't throw exceptions.
inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object,
ArtMethod* referrer, bool access_check, InvokeType type) {
+ ScopedAssertNoThreadSuspension ants(__FUNCTION__);
if (UNLIKELY(this_object == nullptr && type != kStatic)) {
return nullptr;
}
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index 4056ec5..fd9ffbd 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -43,7 +43,7 @@
ArtMethod* referrer,
Thread* self,
bool access_check)
- REQUIRES_SHARED(Locks::mutator_lock_) {
+ REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_) {
if (UNLIKELY(component_count < 0)) {
ThrowNegativeArraySizeException(component_count);
return nullptr; // Failure
@@ -120,19 +120,19 @@
heap->GetCurrentAllocator());
}
-void CheckReferenceResult(mirror::Object* o, Thread* self) {
- if (o == nullptr) {
+void CheckReferenceResult(Handle<mirror::Object> o, Thread* self) {
+ if (o.Get() == nullptr) {
return;
}
// Make sure that the result is an instance of the type this method was expected to return.
- mirror::Class* return_type = self->GetCurrentMethod(nullptr)->GetReturnType(true /* resolve */,
- kRuntimePointerSize);
+ ArtMethod* method = self->GetCurrentMethod(nullptr);
+ mirror::Class* return_type = method->GetReturnType(true /* resolve */, kRuntimePointerSize);
if (!o->InstanceOf(return_type)) {
Runtime::Current()->GetJavaVM()->JniAbortF(nullptr,
"attempt to return an instance of %s from %s",
- PrettyTypeOf(o).c_str(),
- PrettyMethod(self->GetCurrentMethod(nullptr)).c_str());
+ PrettyTypeOf(o.Get()).c_str(),
+ PrettyMethod(method).c_str());
}
}
@@ -186,12 +186,11 @@
// Do nothing.
return zero;
} else {
- StackHandleScope<1> hs(soa.Self());
- auto h_interface_method(hs.NewHandle(soa.Decode<mirror::Method*>(interface_method_jobj)));
+ ArtMethod* interface_method =
+ soa.Decode<mirror::Method*>(interface_method_jobj)->GetArtMethod();
// This can cause thread suspension.
PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
- mirror::Class* result_type =
- h_interface_method->GetArtMethod()->GetReturnType(true /* resolve */, pointer_size);
+ mirror::Class* result_type = interface_method->GetReturnType(true /* resolve */, pointer_size);
mirror::Object* result_ref = soa.Decode<mirror::Object*>(result);
JValue result_unboxed;
if (!UnboxPrimitiveForResult(result_ref, result_type, &result_unboxed)) {
@@ -205,26 +204,29 @@
// a UndeclaredThrowableException.
mirror::Throwable* exception = soa.Self()->GetException();
if (exception->IsCheckedException()) {
- mirror::Object* rcvr = soa.Decode<mirror::Object*>(rcvr_jobj);
- mirror::Class* proxy_class = rcvr->GetClass();
- mirror::Method* interface_method = soa.Decode<mirror::Method*>(interface_method_jobj);
- ArtMethod* proxy_method = rcvr->GetClass()->FindVirtualMethodForInterface(
- interface_method->GetArtMethod(), kRuntimePointerSize);
- auto virtual_methods = proxy_class->GetVirtualMethodsSlice(kRuntimePointerSize);
- size_t num_virtuals = proxy_class->NumVirtualMethods();
- size_t method_size = ArtMethod::Size(kRuntimePointerSize);
- // Rely on the fact that the methods are contiguous to determine the index of the method in
- // the slice.
- int throws_index = (reinterpret_cast<uintptr_t>(proxy_method) -
- reinterpret_cast<uintptr_t>(&virtual_methods.At(0))) / method_size;
- CHECK_LT(throws_index, static_cast<int>(num_virtuals));
- mirror::ObjectArray<mirror::Class>* declared_exceptions =
- proxy_class->GetThrows()->Get(throws_index);
- mirror::Class* exception_class = exception->GetClass();
bool declares_exception = false;
- for (int32_t i = 0; i < declared_exceptions->GetLength() && !declares_exception; i++) {
- mirror::Class* declared_exception = declared_exceptions->Get(i);
- declares_exception = declared_exception->IsAssignableFrom(exception_class);
+ {
+ ScopedAssertNoThreadSuspension ants(__FUNCTION__);
+ mirror::Object* rcvr = soa.Decode<mirror::Object*>(rcvr_jobj);
+ mirror::Class* proxy_class = rcvr->GetClass();
+ mirror::Method* interface_method = soa.Decode<mirror::Method*>(interface_method_jobj);
+ ArtMethod* proxy_method = rcvr->GetClass()->FindVirtualMethodForInterface(
+ interface_method->GetArtMethod(), kRuntimePointerSize);
+ auto virtual_methods = proxy_class->GetVirtualMethodsSlice(kRuntimePointerSize);
+ size_t num_virtuals = proxy_class->NumVirtualMethods();
+ size_t method_size = ArtMethod::Size(kRuntimePointerSize);
+ // Rely on the fact that the methods are contiguous to determine the index of the method in
+ // the slice.
+ int throws_index = (reinterpret_cast<uintptr_t>(proxy_method) -
+ reinterpret_cast<uintptr_t>(&virtual_methods.At(0))) / method_size;
+ CHECK_LT(throws_index, static_cast<int>(num_virtuals));
+ mirror::ObjectArray<mirror::Class>* declared_exceptions =
+ proxy_class->GetThrows()->Get(throws_index);
+ mirror::Class* exception_class = exception->GetClass();
+ for (int32_t i = 0; i < declared_exceptions->GetLength() && !declares_exception; i++) {
+ mirror::Class* declared_exception = declared_exceptions->Get(i);
+ declares_exception = declared_exception->IsAssignableFrom(exception_class);
+ }
}
if (!declares_exception) {
soa.Self()->ThrowNewWrappedException("Ljava/lang/reflect/UndeclaredThrowableException;",
@@ -260,6 +262,7 @@
Runtime::CalleeSaveType type,
bool do_caller_check)
REQUIRES_SHARED(Locks::mutator_lock_) {
+ ScopedAssertNoThreadSuspension ants(__FUNCTION__);
DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type));
const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
diff --git a/runtime/entrypoints/entrypoint_utils.h b/runtime/entrypoints/entrypoint_utils.h
index f88e81d..20c8401 100644
--- a/runtime/entrypoints/entrypoint_utils.h
+++ b/runtime/entrypoints/entrypoint_utils.h
@@ -24,6 +24,7 @@
#include "base/mutex.h"
#include "dex_instruction.h"
#include "gc/allocator_type.h"
+#include "handle.h"
#include "invoke_type.h"
#include "jvalue.h"
#include "runtime.h"
@@ -46,13 +47,16 @@
template <const bool kAccessCheck>
ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
ArtMethod* method,
- Thread* self, bool* slow_path)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ Thread* self,
+ bool* slow_path)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
ALWAYS_INLINE inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
Thread* self,
bool* slow_path)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
@@ -63,21 +67,25 @@
ArtMethod* method,
Thread* self,
gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
Thread* self,
gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
-ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
- Thread* self,
- gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ALWAYS_INLINE inline mirror::Object* AllocObjectFromCodeInitialized(
+ mirror::Class* klass,
+ Thread* self,
+ gc::AllocatorType allocator_type)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
template <bool kAccessCheck>
@@ -85,7 +93,8 @@
int32_t component_count,
ArtMethod* method,
bool* slow_path)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
@@ -97,7 +106,8 @@
ArtMethod* method,
Thread* self,
gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
@@ -105,13 +115,15 @@
ArtMethod* method,
Thread* self,
gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, int32_t component_count,
ArtMethod* method, Thread* self,
bool access_check,
gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx,
int32_t component_count,
@@ -119,7 +131,8 @@
Thread* self,
bool access_check,
gc::AllocatorType allocator_type)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// Type of find field operation for fast and slow case.
enum FindFieldType {
@@ -134,54 +147,76 @@
};
template<FindFieldType type, bool access_check>
-inline ArtField* FindFieldFromCode(
- uint32_t field_idx, ArtMethod* referrer, Thread* self, size_t expected_size)
- REQUIRES_SHARED(Locks::mutator_lock_);
+inline ArtField* FindFieldFromCode(uint32_t field_idx,
+ ArtMethod* referrer,
+ Thread* self,
+ size_t expected_size)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
template<InvokeType type, bool access_check>
-inline ArtMethod* FindMethodFromCode(
- uint32_t method_idx, mirror::Object** this_object, ArtMethod* referrer, Thread* self)
- REQUIRES_SHARED(Locks::mutator_lock_);
+inline ArtMethod* FindMethodFromCode(uint32_t method_idx,
+ mirror::Object** this_object,
+ ArtMethod* referrer,
+ Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// Fast path field resolution that can't initialize classes or throw exceptions.
-inline ArtField* FindFieldFast(
- uint32_t field_idx, ArtMethod* referrer, FindFieldType type, size_t expected_size)
+inline ArtField* FindFieldFast(uint32_t field_idx,
+ ArtMethod* referrer,
+ FindFieldType type,
+ size_t expected_size)
REQUIRES_SHARED(Locks::mutator_lock_);
// Fast path method resolution that can't throw exceptions.
-inline ArtMethod* FindMethodFast(
- uint32_t method_idx, mirror::Object* this_object, ArtMethod* referrer, bool access_check,
- InvokeType type)
+inline ArtMethod* FindMethodFast(uint32_t method_idx,
+ mirror::Object* this_object,
+ ArtMethod* referrer,
+ bool access_check,
+ InvokeType type)
REQUIRES_SHARED(Locks::mutator_lock_);
-inline mirror::Class* ResolveVerifyAndClinit(
- uint32_t type_idx, ArtMethod* referrer, Thread* self, bool can_run_clinit, bool verify_access)
- REQUIRES_SHARED(Locks::mutator_lock_);
+inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
+ ArtMethod* referrer,
+ Thread* self,
+ bool can_run_clinit,
+ bool verify_access)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
// TODO: annotalysis disabled as monitor semantics are maintained in Java code.
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
- NO_THREAD_SAFETY_ANALYSIS;
+ NO_THREAD_SAFETY_ANALYSIS REQUIRES(!Roles::uninterruptible_);
-void CheckReferenceResult(mirror::Object* o, Thread* self)
- REQUIRES_SHARED(Locks::mutator_lock_);
+void CheckReferenceResult(Handle<mirror::Object> o, Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty,
jobject rcvr_jobj, jobject interface_art_method_jobj,
std::vector<jvalue>& args)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
bool FillArrayData(mirror::Object* obj, const Instruction::ArrayDataPayload* payload)
- REQUIRES_SHARED(Locks::mutator_lock_);
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(!Roles::uninterruptible_);
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f);
ArtMethod* GetCalleeSaveMethodCaller(ArtMethod** sp,
Runtime::CalleeSaveType type,
- bool do_caller_check = false);
+ bool do_caller_check = false)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
+ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
+ REQUIRES_SHARED(Locks::mutator_lock_);
} // namespace art
diff --git a/runtime/entrypoints/quick/quick_jni_entrypoints.cc b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
index 64f19af..76b5456 100644
--- a/runtime/entrypoints/quick/quick_jni_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_jni_entrypoints.cc
@@ -114,7 +114,8 @@
PopLocalReferences(saved_local_ref_cookie, self);
}
-extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie, jobject locked,
+extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie,
+ jobject locked,
Thread* self) {
GoToRunnable(self);
UnlockJniSynchronizedMethod(locked, self); // Must decode before pop.
@@ -135,13 +136,17 @@
PopLocalReferences(saved_local_ref_cookie, self);
// Process result.
if (UNLIKELY(self->GetJniEnv()->check_jni)) {
- CheckReferenceResult(o, self);
+ // CheckReferenceResult can resolve types.
+ StackHandleScope<1> hs(self);
+ HandleWrapper<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
+ CheckReferenceResult(h_obj, self);
}
VerifyObject(o);
return o;
}
-extern mirror::Object* JniMethodEndWithReference(jobject result, uint32_t saved_local_ref_cookie,
+extern mirror::Object* JniMethodEndWithReference(jobject result,
+ uint32_t saved_local_ref_cookie,
Thread* self) {
GoToRunnable(self);
return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
@@ -149,7 +154,8 @@
extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
uint32_t saved_local_ref_cookie,
- jobject locked, Thread* self) {
+ jobject locked,
+ Thread* self) {
GoToRunnable(self);
UnlockJniSynchronizedMethod(locked, self);
return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 3043c83..3c6f807 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -1961,8 +1961,12 @@
// Run the visitor and update sp.
BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
- visitor.VisitArguments();
- visitor.FinalizeHandleScope(self);
+ {
+ ScopedAssertNoThreadSuspension sants(__FUNCTION__);
+ visitor.VisitArguments();
+ // FinalizeHandleScope pushes the handle scope on the thread.
+ visitor.FinalizeHandleScope(self);
+ }
// Fix up managed-stack things in Thread.
self->SetTopOfStack(sp);