jni: Support @FastNative methods that return objects

Route the JniMethodStart/JniMethodEnd entrypoint selection through a new
GetJniEntrypointThreadOffset() helper and have @FastNative methods that
return a reference use the pJniMethodFastEndWithReference entrypoint
(QuickEntryPoints grows by one slot), so object-returning @FastNative
methods are now supported by the JNI compiler. Re-enable the
object-returning tests in jni_compiler_test for @FastNative, and update
oat_test and the Thumb2 assembler expectations for the extra entrypoint.

Bug: 32088975
Change-Id: I16f8b7ec6b251812af60ab25f2153d9b72f37044
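
Illustration only, not part of the patch: a sketch of the kind of method this
change enables, modeled on the MyClassNatives.fooIOO fixture exercised by the
re-enabled tests. The Java declaration below is assumed, not taken verbatim
from the tree.

  import dalvik.annotation.optimization.FastNative;

  class MyClassNatives {
    // An @FastNative method with a reference return type. With this change
    // the compiled stub can pair pJniMethodFastStart with
    // pJniMethodFastEndWithReference instead of requiring the normal
    // (non-fast) JNI entrypoints.
    @FastNative
    native Object fooIOO(int x, Object y, Object z);
  }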
diff --git a/compiler/jni/jni_compiler_test.cc b/compiler/jni/jni_compiler_test.cc
index afb8fce..ca1dc69 100644
--- a/compiler/jni/jni_compiler_test.cc
+++ b/compiler/jni/jni_compiler_test.cc
@@ -387,8 +387,7 @@
// Test the normal compiler and normal generic JNI only.
// The following features are unsupported in @FastNative:
// 1) JNI stubs (lookup via dlsym) when methods aren't explicitly registered
-// 2) Returning objects from the JNI function
-// 3) synchronized keyword
+// 2) synchronized keyword
// -- TODO: We can support (1) if we remove the mutator lock assert during stub lookup.
# define JNI_TEST_NORMAL_ONLY(TestName) \
TEST_F(JniCompilerTest, TestName ## NormalCompiler) { \
@@ -826,8 +825,7 @@
gJava_MyClassNatives_fooIOO_calls[gCurrentJni] = 0;
}
-// TODO: Maybe. @FastNative support for returning Objects?
-JNI_TEST_NORMAL_ONLY(CompileAndRunIntObjectObjectMethod)
+JNI_TEST(CompileAndRunIntObjectObjectMethod)
int gJava_MyClassNatives_fooSII_calls[kJniKindCount] = {};
jint Java_MyClassNatives_fooSII(JNIEnv* env ATTRIBUTE_UNUSED,
@@ -1047,8 +1045,7 @@
gJava_MyClassNatives_fooSIOO_calls[gCurrentJni] = 0;
}
-// TODO: Maybe. @FastNative support for returning Objects?
-JNI_TEST_NORMAL_ONLY(CompileAndRunStaticIntObjectObjectMethod)
+JNI_TEST(CompileAndRunStaticIntObjectObjectMethod)
int gJava_MyClassNatives_fooSSIOO_calls[kJniKindCount] = {};
jobject Java_MyClassNatives_fooSSIOO(JNIEnv*, jclass klass, jint x, jobject y, jobject z) {
@@ -1216,8 +1213,7 @@
EXPECT_TRUE(env_->IsSameObject(result, jobj_));
}
-// TODO: Maybe. @FastNative support for returning objects?
-JNI_TEST_NORMAL_ONLY(ReturnGlobalRef)
+JNI_TEST(ReturnGlobalRef)
jint local_ref_test(JNIEnv* env, jobject thisObj, jint x) {
// Add 10 local references
@@ -1357,8 +1353,7 @@
CurrentJniStringSuffix() + "() with CallStaticObjectMethodV");
}
-// TODO: Maybe support returning objects for @FastNative?
-JNI_TEST_NORMAL_ONLY(UpcallReturnTypeChecking_Instance)
+JNI_TEST(UpcallReturnTypeChecking_Instance)
void JniCompilerTest::UpcallReturnTypeChecking_StaticImpl() {
SetUpForTest(true, "staticMethodThatShouldReturnClass", "()Ljava/lang/Class;",
@@ -1385,8 +1380,7 @@
CurrentJniStringSuffix() + "() with CallObjectMethodV");
}
-// TODO: Maybe support returning objects for @FastNative?
-JNI_TEST_NORMAL_ONLY(UpcallReturnTypeChecking_Static)
+JNI_TEST(UpcallReturnTypeChecking_Static)
// This should take jclass, but we're imitating a bug pattern.
void Java_MyClassNatives_instanceMethodThatShouldTakeClass(JNIEnv*, jobject, jclass) {
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 21804e3..3bd290d 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -70,6 +70,47 @@
return JNIMacroAssembler<kPointerSize>::Create(arena, isa, features);
}
+enum class JniEntrypoint {
+ kStart,
+ kEnd
+};
+
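+// Selects the entrypoint to call for JniMethodStart or JniMethodEnd:
+// synchronized methods take precedence and use the *Synchronized
+// entrypoints, otherwise @FastNative methods use the *Fast* entrypoints,
+// and everything else uses the normal ones. reference_return only affects
+// the End entrypoint, where the *WithReference variants are chosen for
+// methods returning objects.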
+template <PointerSize kPointerSize>
+static ThreadOffset<kPointerSize> GetJniEntrypointThreadOffset(JniEntrypoint which,
+ bool reference_return,
+ bool is_synchronized,
+ bool is_fast_native) {
+ if (which == JniEntrypoint::kStart) { // JniMethodStart
+ ThreadOffset<kPointerSize> jni_start =
+ is_synchronized
+ ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStartSynchronized)
+ : (is_fast_native
+ ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastStart)
+ : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStart));
+
+ return jni_start;
+ } else { // JniMethodEnd
+ ThreadOffset<kPointerSize> jni_end(-1);
+ if (reference_return) {
+ // Pass result.
+ jni_end = is_synchronized
+ ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReferenceSynchronized)
+ : (is_fast_native
+ ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEndWithReference)
+ : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReference));
+ } else {
+ jni_end = is_synchronized
+ ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndSynchronized)
+ : (is_fast_native
+ ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEnd)
+ : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEnd));
+ }
+
+ return jni_end;
+ }
+}
+
+
// Generate the JNI bridge for the given method, general contract:
// - Arguments are in the managed runtime format, either on stack or in
// registers, a reference to the method object is supplied as part of this
@@ -345,13 +386,11 @@
FrameOffset locked_object_handle_scope_offset(0xBEEFDEAD);
if (LIKELY(!is_critical_native)) {
// Skip this for @CriticalNative methods. They do not call JniMethodStart.
- ThreadOffset<kPointerSize> jni_start =
- is_synchronized
- ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStartSynchronized)
- : (is_fast_native
- ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastStart)
- : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStart));
-
+ ThreadOffset<kPointerSize> jni_start(
+ GetJniEntrypointThreadOffset<kPointerSize>(JniEntrypoint::kStart,
+ reference_return,
+ is_synchronized,
+ is_fast_native).SizeValue());
main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
locked_object_handle_scope_offset = FrameOffset(0);
if (is_synchronized) {
@@ -543,20 +582,15 @@
if (LIKELY(!is_critical_native)) {
// 12. Call JniMethodEnd
- ThreadOffset<kPointerSize> jni_end(-1);
+ ThreadOffset<kPointerSize> jni_end(
+ GetJniEntrypointThreadOffset<kPointerSize>(JniEntrypoint::kEnd,
+ reference_return,
+ is_synchronized,
+ is_fast_native).SizeValue());
if (reference_return) {
// Pass result.
- jni_end = is_synchronized
- ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReferenceSynchronized)
- : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReference);
SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister());
end_jni_conv->Next();
- } else {
- jni_end = is_synchronized
- ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndSynchronized)
- : (is_fast_native
- ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEnd)
- : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEnd));
}
// Pass saved local reference state.
if (end_jni_conv->IsCurrentParamOnStack()) {
diff --git a/compiler/oat_test.cc b/compiler/oat_test.cc
index 593d8e9..ffeff76 100644
--- a/compiler/oat_test.cc
+++ b/compiler/oat_test.cc
@@ -462,7 +462,7 @@
EXPECT_EQ(72U, sizeof(OatHeader));
EXPECT_EQ(4U, sizeof(OatMethodOffsets));
EXPECT_EQ(20U, sizeof(OatQuickMethodHeader));
- EXPECT_EQ(163 * static_cast<size_t>(GetInstructionSetPointerSize(kRuntimeISA)),
+ EXPECT_EQ(164 * static_cast<size_t>(GetInstructionSetPointerSize(kRuntimeISA)),
sizeof(QuickEntryPoints));
}
diff --git a/compiler/utils/assembler_thumb_test.cc b/compiler/utils/assembler_thumb_test.cc
index 86a4aa2..10bed13 100644
--- a/compiler/utils/assembler_thumb_test.cc
+++ b/compiler/utils/assembler_thumb_test.cc
@@ -158,7 +158,7 @@
}
if (CompareIgnoringSpace(results[lineindex], testline) != 0) {
LOG(FATAL) << "Output is not as expected at line: " << lineindex
- << results[lineindex] << "/" << testline;
+ << results[lineindex] << "/" << testline << ", test name: " << testname;
}
++lineindex;
}
diff --git a/compiler/utils/assembler_thumb_test_expected.cc.inc b/compiler/utils/assembler_thumb_test_expected.cc.inc
index 91f3970..69e1d8f 100644
--- a/compiler/utils/assembler_thumb_test_expected.cc.inc
+++ b/compiler/utils/assembler_thumb_test_expected.cc.inc
@@ -5544,7 +5544,7 @@
" 10c: ecbd 8a10 vpop {s16-s31}\n",
" 110: e8bd 8de0 ldmia.w sp!, {r5, r6, r7, r8, sl, fp, pc}\n",
" 114: 4660 mov r0, ip\n",
- " 116: f8d9 c2ac ldr.w ip, [r9, #684] ; 0x2ac\n",
+ " 116: f8d9 c2b0 ldr.w ip, [r9, #688] ; 0x2b0\n",
" 11a: 47e0 blx ip\n",
nullptr
};