/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <android-base/logging.h>

#include "art_method-inl.h"
#include "base/casts.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "indirect_reference_table.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"
#include "verify_object.h"

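// Entrypoints called by JNI method stubs on entry to and exit from native code. These
// helpers manage thread state transitions, the local reference table segment state,
// the locks of synchronized native methods, and decoding of reference return values.
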
namespace art {

static_assert(sizeof(IRTSegmentState) == sizeof(uint32_t), "IRTSegmentState size unexpected");
static_assert(std::is_trivial<IRTSegmentState>::value, "IRTSegmentState not trivial");

static inline void GoToRunnableFast(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

extern void ReadBarrierJni(mirror::CompressedReference<mirror::Object>* handle_on_stack,
                           Thread* self ATTRIBUTE_UNUSED) {
  DCHECK(kUseReadBarrier);
  if (kUseBakerReadBarrier) {
    DCHECK(handle_on_stack->AsMirrorPtr() != nullptr)
        << "The class of a static JNI call must not be null";
    // Check the mark bit and return early if it's already marked.
    if (LIKELY(handle_on_stack->AsMirrorPtr()->GetMarkBit() != 0)) {
      return;
    }
  }
  // Call the read barrier and update the handle.
  mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack);
  handle_on_stack->Assign(to_ref);
}

// Called on entry to fast JNI: only save and reset the local reference table's segment
// state; @FastNative methods never leave the Runnable state.
extern uint32_t JniMethodFastStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
  env->local_ref_cookie = env->locals.GetSegmentState();

  if (kIsDebugBuild) {
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
  }

  return saved_local_ref_cookie;
}

// Called on entry to JNI: transition out of Runnable and release our share of the
// mutator_lock_.
extern uint32_t JniMethodStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
  env->local_ref_cookie = env->locals.GetSegmentState();
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  // TODO: Introduce special entrypoint for synchronized @FastNative methods?
  //       Or ban synchronized @FastNative outright to avoid the extra check here?
  DCHECK(!native_method->IsFastNative() || native_method->IsSynchronized());
  if (!native_method->IsFastNative()) {
    // When not using fast JNI, we transition out of Runnable.
    self->TransitionFromRunnableToSuspended(kNative);
  }
  return saved_local_ref_cookie;
}

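// Synchronized variant: acquire the monitor of the locked object (the receiver, or the
// declaring class for a static method) before performing the normal JNI method start.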
extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) {
  self->DecodeJObject(to_lock)->MonitorEnter(self);
  return JniMethodStart(self);
}

// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  bool is_fast = native_method->IsFastNative();
  if (!is_fast) {
    self->TransitionFromSuspendedToRunnable();
  } else {
    GoToRunnableFast(self);
  }
}

ALWAYS_INLINE static inline void GoToRunnableFast(Thread* self) {
  if (kIsDebugBuild) {
    // Should only enter here if the method is annotated @FastNative.
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
  }

  // In @FastNative mode we never transitioned out of Runnable, so there is no state
  // transition to undo. Only perform a suspend check on the way out of JNI, and only
  // if a flag is raised.
  if (UNLIKELY(self->TestAllFlags())) {
    DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
    self->CheckSuspend();
  }
}

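// Restore the local reference table's segment state to the value saved by
// JniMethodStart()/JniMethodFastStart(), freeing all local references created by the
// native method, and pop the top-of-stack handle scope.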
static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JNIEnvExt* env = self->GetJniEnv();
  if (UNLIKELY(env->check_jni)) {
    env->CheckNoHeldMonitors();
  }
  env->locals.SetSegmentState(env->local_ref_cookie);
  env->local_ref_cookie = bit_cast<IRTSegmentState>(saved_local_ref_cookie);
  self->PopHandleScope();
}

// TODO: These should probably be templatized or macro-ized.
//       Otherwise there's just too much repetitive boilerplate.

extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnable(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

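// @FastNative exit: no state transition was made on entry, so only a pending suspend
// check (if any) runs before the local references are popped.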
extern void JniMethodFastEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnableFast(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie,
                                     jobject locked,
                                     Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
  PopLocalReferences(saved_local_ref_cookie, self);
}

// Common result handling for EndWithReference.
static mirror::Object* JniMethodEndWithReferenceHandleResult(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             Thread* self)
    NO_THREAD_SAFETY_ANALYSIS {
  // Must decode before pop. The 'result' may not be valid in case of an exception, though.
  ObjPtr<mirror::Object> o;
  if (!self->IsExceptionPending()) {
    o = self->DecodeJObject(result);
  }
  PopLocalReferences(saved_local_ref_cookie, self);
  // Process result.
  if (UNLIKELY(self->GetJniEnv()->check_jni)) {
    // CheckReferenceResult can resolve types.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
    CheckReferenceResult(h_obj, self);
  }
  VerifyObject(o);
  return o.Ptr();
}

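// @FastNative exit for reference-returning methods: the thread stayed Runnable, so
// only a suspend check may run before the common result handling.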
extern mirror::Object* JniMethodFastEndWithReference(jobject result,
                                                     uint32_t saved_local_ref_cookie,
                                                     Thread* self) {
  GoToRunnableFast(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReference(jobject result,
                                                 uint32_t saved_local_ref_cookie,
                                                 Thread* self) {
  GoToRunnable(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             jobject locked,
                                                             Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

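// Exit path for the generic JNI trampoline: performs the state transition (for normal
// native methods only), releases the synchronized-method lock if one is held, pops the
// local references, and packs the native result into a uint64_t according to the
// method's return shorty.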
extern uint64_t GenericJniMethodEnd(Thread* self,
                                    uint32_t saved_local_ref_cookie,
                                    jvalue result,
                                    uint64_t result_f,
                                    ArtMethod* called,
                                    HandleScope* handle_scope)
    // TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS
    NO_THREAD_SAFETY_ANALYSIS {
  bool critical_native = called->IsCriticalNative();
  bool fast_native = called->IsFastNative();
  bool normal_native = !critical_native && !fast_native;

  // @FastNative and @CriticalNative methods do not perform a state transition.
  if (LIKELY(normal_native)) {
    GoToRunnable(self);
  }
  // We need the mutator lock (i.e., calling GoToRunnable()) before accessing the shorty or the
  // locked object.
  jobject locked = called->IsSynchronized() ? handle_scope->GetHandle(0).ToJObject() : nullptr;
  char return_shorty_char = called->GetShorty()[0];
  if (return_shorty_char == 'L') {
    if (locked != nullptr) {
      DCHECK(normal_native) << "Synchronized @FastNative methods are not supported";
      UnlockJniSynchronizedMethod(locked, self);
    }
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceHandleResult(
        result.l, saved_local_ref_cookie, self));
  } else {
    if (locked != nullptr) {
      DCHECK(normal_native) << "Synchronized @FastNative methods are not supported";
      UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
    }
    if (LIKELY(!critical_native)) {
      PopLocalReferences(saved_local_ref_cookie, self);
    }
    switch (return_shorty_char) {
      case 'F': {
        if (kRuntimeISA == InstructionSet::kX86) {
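          // Note: on 32-bit x86 the native calling convention returns floating-point
          // values in the x87 register ST0, so the value saved by the stub is a double
          // even for a float return and must be narrowed back.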
          // Convert the result back to float.
          double d = bit_cast<double, uint64_t>(result_f);
          return bit_cast<uint32_t, float>(static_cast<float>(d));
        } else {
          return result_f;
        }
      }
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

}  // namespace art