Change check cast entrypoint to check instance of
Reduces code size since we do not need to reload class before
calling slow path.
TODO: Delete read barriers in the check cast code since the slow
path will retry with the proper read barriers if the check fails.
Bug: 12687968
Bug: 29516974
Test: test-art-host + test-art-target with CC
Change-Id: Ia4eb9bbe3fe2d2016e44523cf0451210828d7b88
diff --git a/runtime/arch/arm/entrypoints_init_arm.cc b/runtime/arch/arm/entrypoints_init_arm.cc
index cb8edff..01b3f34 100644
--- a/runtime/arch/arm/entrypoints_init_arm.cc
+++ b/runtime/arch/arm/entrypoints_init_arm.cc
@@ -30,8 +30,7 @@
namespace art {
// Cast entrypoints.
-extern "C" size_t artIsAssignableFromCode(const mirror::Class* klass,
- const mirror::Class* ref_class);
+extern "C" size_t artIsAssignableFromCode(mirror::Class* klass, mirror::Class* ref_class);
// Read barrier entrypoints.
// art_quick_read_barrier_mark_regX uses an non-standard calling
@@ -73,7 +72,7 @@
// Cast
qpoints->pInstanceofNonTrivial = artIsAssignableFromCode;
- qpoints->pCheckCast = art_quick_check_cast;
+ qpoints->pCheckInstanceOf = art_quick_check_instance_of;
// Math
qpoints->pIdivmod = __aeabi_idivmod;
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 0135260..550f8c7 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -764,11 +764,12 @@
END art_quick_unlock_object_no_inline
/*
- * Entry from managed code that calls artIsAssignableFromCode and on failure calls
- * artThrowClassCastException.
+ * Entry from managed code that calls artInstanceOfFromCode and on failure calls
+ * artThrowClassCastExceptionForObject.
*/
- .extern artThrowClassCastException
-ENTRY art_quick_check_cast
+ .extern artInstanceOfFromCode
+ .extern artThrowClassCastExceptionForObject
+ENTRY art_quick_check_instance_of
push {r0-r1, lr} @ save arguments, link register and pad
.cfi_adjust_cfa_offset 12
.cfi_rel_offset r0, 0
@@ -776,7 +777,7 @@
.cfi_rel_offset lr, 8
sub sp, #4
.cfi_adjust_cfa_offset 4
- bl artIsAssignableFromCode
+ bl artInstanceOfFromCode
cbz r0, .Lthrow_class_cast_exception
add sp, #4
.cfi_adjust_cfa_offset -4
@@ -792,9 +793,9 @@
.cfi_restore lr
SETUP_SAVE_ALL_CALLEE_SAVES_FRAME r2 @ save all registers as basis for long jump context
mov r2, r9 @ pass Thread::Current
- bl artThrowClassCastException @ (Class*, Class*, Thread*)
+ bl artThrowClassCastExceptionForObject @ (Object*, Class*, Thread*)
bkpt
-END art_quick_check_cast
+END art_quick_check_instance_of
// Restore rReg's value from [sp, #offset] if rReg is not the same as rExclude.
.macro POP_REG_NE rReg, offset, rExclude
diff --git a/runtime/arch/arm64/entrypoints_init_arm64.cc b/runtime/arch/arm64/entrypoints_init_arm64.cc
index c2078f0..3c77672 100644
--- a/runtime/arch/arm64/entrypoints_init_arm64.cc
+++ b/runtime/arch/arm64/entrypoints_init_arm64.cc
@@ -30,8 +30,7 @@
namespace art {
// Cast entrypoints.
-extern "C" size_t artIsAssignableFromCode(const mirror::Class* klass,
- const mirror::Class* ref_class);
+extern "C" size_t artIsAssignableFromCode(mirror::Class* klass, mirror::Class* ref_class);
// Read barrier entrypoints.
// art_quick_read_barrier_mark_regX uses an non-standard calling
@@ -76,7 +75,7 @@
// Cast
qpoints->pInstanceofNonTrivial = artIsAssignableFromCode;
- qpoints->pCheckCast = art_quick_check_cast;
+ qpoints->pCheckInstanceOf = art_quick_check_instance_of;
// Math
// TODO null entrypoints not needed for ARM64 - generate inline.
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index d806715..d8ebe26 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -1294,18 +1294,19 @@
END art_quick_unlock_object_no_inline
/*
- * Entry from managed code that calls artIsAssignableFromCode and on failure calls
- * artThrowClassCastException.
+ * Entry from managed code that calls artInstanceOfFromCode and on failure calls
+ * artThrowClassCastExceptionForObject.
*/
- .extern artThrowClassCastException
-ENTRY art_quick_check_cast
+ .extern artInstanceOfFromCode
+ .extern artThrowClassCastExceptionForObject
+ENTRY art_quick_check_instance_of
// Store arguments and link register
// Stack needs to be 16B aligned on calls.
SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 32
SAVE_REG xLR, 24
// Call runtime code
- bl artIsAssignableFromCode
+ bl artInstanceOfFromCode
// Check for exception
cbz x0, .Lthrow_class_cast_exception
@@ -1324,9 +1325,9 @@
SETUP_SAVE_ALL_CALLEE_SAVES_FRAME // save all registers as basis for long jump context
mov x2, xSELF // pass Thread::Current
- bl artThrowClassCastException // (Class*, Class*, Thread*)
+ bl artThrowClassCastExceptionForObject // (Object*, Class*, Thread*)
brk 0 // We should not return here...
-END art_quick_check_cast
+END art_quick_check_instance_of
// Restore xReg's value from [sp, #offset] if xReg is not the same as xExclude.
.macro POP_REG_NE xReg, offset, xExclude
diff --git a/runtime/arch/mips/entrypoints_init_mips.cc b/runtime/arch/mips/entrypoints_init_mips.cc
index e10d4e6..e3230f6 100644
--- a/runtime/arch/mips/entrypoints_init_mips.cc
+++ b/runtime/arch/mips/entrypoints_init_mips.cc
@@ -30,8 +30,7 @@
namespace art {
// Cast entrypoints.
-extern "C" size_t artIsAssignableFromCode(const mirror::Class* klass,
- const mirror::Class* ref_class);
+extern "C" size_t artIsAssignableFromCode(mirror::Class* klass, mirror::Class* ref_class);
// Math entrypoints.
extern int32_t CmpgDouble(double a, double b);
@@ -73,8 +72,8 @@
// Cast
qpoints->pInstanceofNonTrivial = artIsAssignableFromCode;
static_assert(IsDirectEntrypoint(kQuickInstanceofNonTrivial), "Direct C stub not marked direct.");
- qpoints->pCheckCast = art_quick_check_cast;
- static_assert(!IsDirectEntrypoint(kQuickCheckCast), "Non-direct C stub marked direct.");
+ qpoints->pCheckInstanceOf = art_quick_check_instance_of;
+ static_assert(!IsDirectEntrypoint(kQuickCheckInstanceOf), "Non-direct C stub marked direct.");
// DexCache
qpoints->pInitializeStaticStorage = art_quick_initialize_static_storage;
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index c3c1882..34e34b4 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -1171,10 +1171,11 @@
END art_quick_unlock_object_no_inline
/*
- * Entry from managed code that calls artCheckCastFromCode and delivers exception on failure.
+ * Entry from managed code that calls artInstanceOfFromCode and delivers exception on failure.
*/
- .extern artThrowClassCastException
-ENTRY art_quick_check_cast
+ .extern artInstanceOfFromCode
+ .extern artThrowClassCastExceptionForObject
+ENTRY art_quick_check_instance_of
addiu $sp, $sp, -32
.cfi_adjust_cfa_offset 32
sw $gp, 16($sp)
@@ -1183,7 +1184,7 @@
sw $t9, 8($sp)
sw $a1, 4($sp)
sw $a0, 0($sp)
- la $t9, artIsAssignableFromCode
+ la $t9, artInstanceOfFromCode
jalr $t9
addiu $sp, $sp, -16 # reserve argument slots on the stack
addiu $sp, $sp, 16
@@ -1200,10 +1201,10 @@
addiu $sp, $sp, 32
.cfi_adjust_cfa_offset -32
SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
- la $t9, artThrowClassCastException
- jalr $zero, $t9 # artThrowClassCastException (Class*, Class*, Thread*)
+ la $t9, artThrowClassCastExceptionForObject
+ jalr $zero, $t9 # artThrowClassCastExceptionForObject (Object*, Class*, Thread*)
move $a2, rSELF # pass Thread::Current
-END art_quick_check_cast
+END art_quick_check_instance_of
/*
* Restore rReg's value from offset($sp) if rReg is not the same as rExclude.
diff --git a/runtime/arch/mips64/entrypoints_init_mips64.cc b/runtime/arch/mips64/entrypoints_init_mips64.cc
index a037905..43b73f1 100644
--- a/runtime/arch/mips64/entrypoints_init_mips64.cc
+++ b/runtime/arch/mips64/entrypoints_init_mips64.cc
@@ -30,8 +30,8 @@
namespace art {
// Cast entrypoints.
-extern "C" size_t artIsAssignableFromCode(const mirror::Class* klass,
- const mirror::Class* ref_class);
+extern "C" size_t artIsAssignableFromCode(mirror::Class* klass, mirror::Class* ref_class);
+
// Math entrypoints.
extern int32_t CmpgDouble(double a, double b);
extern int32_t CmplDouble(double a, double b);
@@ -64,7 +64,7 @@
// Cast
qpoints->pInstanceofNonTrivial = artIsAssignableFromCode;
- qpoints->pCheckCast = art_quick_check_cast;
+ qpoints->pCheckInstanceOf = art_quick_check_instance_of;
// Math
qpoints->pCmpgDouble = CmpgDouble;
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index cb2d1c8..0861d2d 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -1256,10 +1256,11 @@
END art_quick_unlock_object_no_inline
/*
- * Entry from managed code that calls artCheckCastFromCode and delivers exception on failure.
+ * Entry from managed code that calls artInstanceOfFromCode and delivers exception on failure.
*/
- .extern artThrowClassCastException
-ENTRY art_quick_check_cast
+ .extern artInstanceOfFromCode
+ .extern artThrowClassCastExceptionForObject
+ENTRY art_quick_check_instance_of
daddiu $sp, $sp, -32
.cfi_adjust_cfa_offset 32
sd $ra, 24($sp)
@@ -1267,7 +1268,7 @@
sd $t9, 16($sp)
sd $a1, 8($sp)
sd $a0, 0($sp)
- jal artIsAssignableFromCode
+ jal artInstanceOfFromCode
.cpreturn # Restore gp from t8 in branch delay slot.
# t8 may be clobbered in artIsAssignableFromCode.
beq $v0, $zero, .Lthrow_class_cast_exception
@@ -1283,10 +1284,10 @@
.cfi_adjust_cfa_offset -32
SETUP_GP
SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
- dla $t9, artThrowClassCastException
- jalr $zero, $t9 # artThrowClassCastException (Class*, Class*, Thread*)
+ dla $t9, artThrowClassCastExceptionForObject
+ jalr $zero, $t9 # artThrowClassCastExceptionForObject (Object*, Class*, Thread*)
move $a2, rSELF # pass Thread::Current
-END art_quick_check_cast
+END art_quick_check_instance_of
/*
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index c151f00..243b9d1 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -805,7 +805,7 @@
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
(defined(__x86_64__) && !defined(__APPLE__))
-extern "C" void art_quick_check_cast(void);
+extern "C" void art_quick_check_instance_of(void);
#endif
TEST_F(StubTest, CheckCast) {
@@ -813,65 +813,89 @@
(defined(__x86_64__) && !defined(__APPLE__))
Thread* self = Thread::Current();
- const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);
+ const uintptr_t art_quick_check_instance_of =
+ StubTest::GetEntrypoint(self, kQuickCheckInstanceOf);
// Find some classes.
ScopedObjectAccess soa(self);
// garbage is created during ClassLinker::Init
- StackHandleScope<4> hs(soa.Self());
- Handle<mirror::Class> c(
- hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
- Handle<mirror::Class> c2(
- hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));
- Handle<mirror::Class> list(
- hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/util/List;")));
- Handle<mirror::Class> array_list(
- hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/util/ArrayList;")));
+ VariableSizedHandleScope hs(soa.Self());
+ Handle<mirror::Class> klass_obj(
+ hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
+ Handle<mirror::Class> klass_str(
+ hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/String;")));
+ Handle<mirror::Class> klass_list(
+ hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/List;")));
+ Handle<mirror::Class> klass_cloneable(
+ hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Cloneable;")));
+ Handle<mirror::Class> klass_array_list(
+ hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/util/ArrayList;")));
+ Handle<mirror::Object> obj(hs.NewHandle(klass_obj->AllocObject(soa.Self())));
+ Handle<mirror::String> string(hs.NewHandle(
+ mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABCD")));
+ Handle<mirror::Object> array_list(hs.NewHandle(klass_array_list->AllocObject(soa.Self())));
EXPECT_FALSE(self->IsExceptionPending());
- Invoke3(reinterpret_cast<size_t>(c.Get()),
- reinterpret_cast<size_t>(c.Get()),
+ Invoke3(reinterpret_cast<size_t>(obj.Get()),
+ reinterpret_cast<size_t>(klass_obj.Get()),
0U,
- art_quick_check_cast,
+ art_quick_check_instance_of,
self);
EXPECT_FALSE(self->IsExceptionPending());
- Invoke3(reinterpret_cast<size_t>(c2.Get()),
- reinterpret_cast<size_t>(c2.Get()),
+ // Expected true: Test string instance of java.lang.String.
+ Invoke3(reinterpret_cast<size_t>(string.Get()),
+ reinterpret_cast<size_t>(klass_str.Get()),
0U,
- art_quick_check_cast,
+ art_quick_check_instance_of,
self);
EXPECT_FALSE(self->IsExceptionPending());
- Invoke3(reinterpret_cast<size_t>(c.Get()),
- reinterpret_cast<size_t>(c2.Get()),
+ // Expected true: Test string instance of java.lang.Object.
+ Invoke3(reinterpret_cast<size_t>(string.Get()),
+ reinterpret_cast<size_t>(klass_obj.Get()),
0U,
- art_quick_check_cast,
+ art_quick_check_instance_of,
self);
EXPECT_FALSE(self->IsExceptionPending());
- Invoke3(reinterpret_cast<size_t>(list.Get()),
- reinterpret_cast<size_t>(array_list.Get()),
+ // Expected false: Test object instance of java.lang.String.
+ Invoke3(reinterpret_cast<size_t>(obj.Get()),
+ reinterpret_cast<size_t>(klass_str.Get()),
0U,
- art_quick_check_cast,
- self);
- EXPECT_FALSE(self->IsExceptionPending());
-
- Invoke3(reinterpret_cast<size_t>(list.Get()),
- reinterpret_cast<size_t>(c2.Get()),
- 0U,
- art_quick_check_cast,
+ art_quick_check_instance_of,
self);
EXPECT_TRUE(self->IsExceptionPending());
self->ClearException();
- // TODO: Make the following work. But that would require correct managed frames.
- Invoke3(reinterpret_cast<size_t>(c2.Get()),
- reinterpret_cast<size_t>(c.Get()),
+ Invoke3(reinterpret_cast<size_t>(array_list.Get()),
+ reinterpret_cast<size_t>(klass_list.Get()),
0U,
- art_quick_check_cast,
+ art_quick_check_instance_of,
+ self);
+ EXPECT_FALSE(self->IsExceptionPending());
+
+ Invoke3(reinterpret_cast<size_t>(array_list.Get()),
+ reinterpret_cast<size_t>(klass_cloneable.Get()),
+ 0U,
+ art_quick_check_instance_of,
+ self);
+ EXPECT_FALSE(self->IsExceptionPending());
+
+ Invoke3(reinterpret_cast<size_t>(string.Get()),
+ reinterpret_cast<size_t>(klass_array_list.Get()),
+ 0U,
+ art_quick_check_instance_of,
+ self);
+ EXPECT_TRUE(self->IsExceptionPending());
+ self->ClearException();
+
+ Invoke3(reinterpret_cast<size_t>(string.Get()),
+ reinterpret_cast<size_t>(klass_cloneable.Get()),
+ 0U,
+ art_quick_check_instance_of,
self);
EXPECT_TRUE(self->IsExceptionPending());
self->ClearException();
diff --git a/runtime/arch/x86/entrypoints_init_x86.cc b/runtime/arch/x86/entrypoints_init_x86.cc
index 0a10a3c..877df8f 100644
--- a/runtime/arch/x86/entrypoints_init_x86.cc
+++ b/runtime/arch/x86/entrypoints_init_x86.cc
@@ -27,8 +27,7 @@
namespace art {
// Cast entrypoints.
-extern "C" size_t art_quick_is_assignable(const mirror::Class* klass,
- const mirror::Class* ref_class);
+extern "C" size_t art_quick_is_assignable(mirror::Class* klass, mirror::Class* ref_class);
// Read barrier entrypoints.
// art_quick_read_barrier_mark_regX uses an non-standard calling
@@ -50,7 +49,7 @@
// Cast
qpoints->pInstanceofNonTrivial = art_quick_is_assignable;
- qpoints->pCheckCast = art_quick_check_cast;
+ qpoints->pCheckInstanceOf = art_quick_check_instance_of;
// More math.
qpoints->pCos = cos;
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 98739d3..635bfa3 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1361,11 +1361,11 @@
ret
END_FUNCTION art_quick_is_assignable
-DEFINE_FUNCTION art_quick_check_cast
+DEFINE_FUNCTION art_quick_check_instance_of
PUSH eax // alignment padding
- PUSH ecx // pass arg2 - obj->klass
- PUSH eax // pass arg1 - checked class
- call SYMBOL(artIsAssignableFromCode) // (Class* klass, Class* ref_klass)
+ PUSH ecx // pass arg2 - checked class
+ PUSH eax // pass arg1 - obj
+ call SYMBOL(artInstanceOfFromCode) // (Object* obj, Class* ref_klass)
testl %eax, %eax
jz 1f // jump forward if not assignable
addl LITERAL(12), %esp // pop arguments
@@ -1385,9 +1385,9 @@
CFI_ADJUST_CFA_OFFSET(4)
PUSH ecx // pass arg2
PUSH eax // pass arg1
- call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*)
+ call SYMBOL(artThrowClassCastExceptionForObject) // (Object* src, Class* dest, Thread*)
UNREACHABLE
-END_FUNCTION art_quick_check_cast
+END_FUNCTION art_quick_check_instance_of
// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
MACRO2(POP_REG_NE, reg, exclude_reg)
diff --git a/runtime/arch/x86_64/entrypoints_init_x86_64.cc b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
index 8c425d5..59c9dfe 100644
--- a/runtime/arch/x86_64/entrypoints_init_x86_64.cc
+++ b/runtime/arch/x86_64/entrypoints_init_x86_64.cc
@@ -30,8 +30,7 @@
namespace art {
// Cast entrypoints.
-extern "C" size_t art_quick_assignable_from_code(const mirror::Class* klass,
- const mirror::Class* ref_class);
+extern "C" size_t art_quick_assignable_from_code(mirror::Class* klass, mirror::Class* ref_class);
// Read barrier entrypoints.
// art_quick_read_barrier_mark_regX uses an non-standard calling
@@ -65,7 +64,7 @@
// Cast
qpoints->pInstanceofNonTrivial = art_quick_assignable_from_code;
- qpoints->pCheckCast = art_quick_check_cast;
+ qpoints->pCheckInstanceOf = art_quick_check_instance_of;
// More math.
qpoints->pCos = cos;
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 185e55e..72a03eb 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1480,14 +1480,14 @@
RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object_no_inline
-DEFINE_FUNCTION art_quick_check_cast
+DEFINE_FUNCTION art_quick_check_instance_of
// We could check the super classes here but that is usually already checked in the caller.
PUSH rdi // Save args for exc
PUSH rsi
subq LITERAL(8), %rsp // Alignment padding.
CFI_ADJUST_CFA_OFFSET(8)
SETUP_FP_CALLEE_SAVE_FRAME
- call SYMBOL(artIsAssignableFromCode) // (Class* klass, Class* ref_klass)
+ call SYMBOL(artInstanceOfFromCode) // (Object* obj, Class* ref_klass)
testq %rax, %rax
jz 1f // jump forward if not assignable
RESTORE_FP_CALLEE_SAVE_FRAME
@@ -1506,9 +1506,9 @@
POP rdi
SETUP_SAVE_ALL_CALLEE_SAVES_FRAME // save all registers as basis for long jump context
mov %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
- call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*)
+ call SYMBOL(artThrowClassCastExceptionForObject) // (Object* src, Class* dest, Thread*)
UNREACHABLE
-END_FUNCTION art_quick_check_cast
+END_FUNCTION art_quick_check_instance_of
// Restore reg's value if reg is not the same as exclude_reg, otherwise just adjust stack.
diff --git a/runtime/entrypoints/quick/quick_cast_entrypoints.cc b/runtime/entrypoints/quick/quick_cast_entrypoints.cc
index 2732d68..083d578 100644
--- a/runtime/entrypoints/quick/quick_cast_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_cast_entrypoints.cc
@@ -27,4 +27,12 @@
return klass->IsAssignableFrom(ref_class) ? 1 : 0;
}
+// Instance-of test for code, won't throw. Null and equality tests already performed by the caller.
+extern "C" size_t artInstanceOfFromCode(mirror::Object* obj, mirror::Class* ref_class)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK(obj != nullptr);
+ DCHECK(ref_class != nullptr);
+ return obj->InstanceOf(ref_class) ? 1 : 0;
+}
+
} // namespace art
diff --git a/runtime/entrypoints/quick/quick_default_externs.h b/runtime/entrypoints/quick/quick_default_externs.h
index cfa5325..64030f3 100644
--- a/runtime/entrypoints/quick/quick_default_externs.h
+++ b/runtime/entrypoints/quick/quick_default_externs.h
@@ -31,7 +31,7 @@
// These are extern declarations of assembly stubs with common names.
// Cast entrypoints.
-extern "C" void art_quick_check_cast(const art::mirror::Class*, const art::mirror::Class*);
+extern "C" void art_quick_check_instance_of(art::mirror::Object*, art::mirror::Class*);
// DexCache entrypoints.
extern "C" void* art_quick_initialize_static_storage(uint32_t);
diff --git a/runtime/entrypoints/quick/quick_entrypoints_list.h b/runtime/entrypoints/quick/quick_entrypoints_list.h
index 3cfee45..dd8fe55 100644
--- a/runtime/entrypoints/quick/quick_entrypoints_list.h
+++ b/runtime/entrypoints/quick/quick_entrypoints_list.h
@@ -33,8 +33,8 @@
V(AllocStringFromChars, void*, int32_t, int32_t, void*) \
V(AllocStringFromString, void*, void*) \
\
- V(InstanceofNonTrivial, size_t, const mirror::Class*, const mirror::Class*) \
- V(CheckCast, void, const mirror::Class*, const mirror::Class*) \
+ V(InstanceofNonTrivial, size_t, mirror::Class*, mirror::Class*) \
+ V(CheckInstanceOf, void, mirror::Object*, mirror::Class*) \
\
V(InitializeStaticStorage, void*, uint32_t) \
V(InitializeTypeAndVerifyAccess, void*, uint32_t) \
diff --git a/runtime/entrypoints/quick/quick_throw_entrypoints.cc b/runtime/entrypoints/quick/quick_throw_entrypoints.cc
index a205b17..c8ee99a 100644
--- a/runtime/entrypoints/quick/quick_throw_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_throw_entrypoints.cc
@@ -111,6 +111,14 @@
self->QuickDeliverException();
}
+extern "C" NO_RETURN void artThrowClassCastExceptionForObject(mirror::Object* obj,
+ mirror::Class* dest_type,
+ Thread* self)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK(obj != nullptr);
+ artThrowClassCastException(dest_type, obj->GetClass(), self);
+}
+
extern "C" NO_RETURN void artThrowArrayStoreException(mirror::Object* array, mirror::Object* value,
Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_) {
diff --git a/runtime/entrypoints_order_test.cc b/runtime/entrypoints_order_test.cc
index cdb1051..b0463d7 100644
--- a/runtime/entrypoints_order_test.cc
+++ b/runtime/entrypoints_order_test.cc
@@ -174,8 +174,9 @@
sizeof(void*));
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pAllocStringFromString, pInstanceofNonTrivial,
sizeof(void*));
- EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pInstanceofNonTrivial, pCheckCast, sizeof(void*));
- EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pCheckCast, pInitializeStaticStorage, sizeof(void*));
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pInstanceofNonTrivial, pCheckInstanceOf, sizeof(void*));
+ EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pCheckInstanceOf, pInitializeStaticStorage,
+ sizeof(void*));
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pInitializeStaticStorage, pInitializeTypeAndVerifyAccess,
sizeof(void*));
EXPECT_OFFSET_DIFFNP(QuickEntryPoints, pInitializeTypeAndVerifyAccess, pInitializeType,
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 3f7d086..0420de6 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -2503,7 +2503,7 @@
QUICK_ENTRY_POINT_INFO(pAllocStringFromChars)
QUICK_ENTRY_POINT_INFO(pAllocStringFromString)
QUICK_ENTRY_POINT_INFO(pInstanceofNonTrivial)
- QUICK_ENTRY_POINT_INFO(pCheckCast)
+ QUICK_ENTRY_POINT_INFO(pCheckInstanceOf)
QUICK_ENTRY_POINT_INFO(pInitializeStaticStorage)
QUICK_ENTRY_POINT_INFO(pInitializeTypeAndVerifyAccess)
QUICK_ENTRY_POINT_INFO(pInitializeType)