/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "base/casts.h"
#include "base/logging.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "indirect_reference_table.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"
#include "verify_object.h"

namespace art {

static_assert(sizeof(IRTSegmentState) == sizeof(uint32_t), "IRTSegmentState size unexpected");
static_assert(std::is_trivial<IRTSegmentState>::value, "IRTSegmentState not trivial");

static inline void GoToRunnableFast(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

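// ReadBarrierJni operates on the stack slot holding the declaring class of a static native
// method (see the DCHECK message below): with the Baker read barrier it returns early once the
// reference is already marked, otherwise it pushes the reference through the read barrier and
// writes the to-space pointer back into the handle. It is presumably invoked from the compiled
// JNI stub before that class reference is handed to the native code.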
extern void ReadBarrierJni(mirror::CompressedReference<mirror::Object>* handle_on_stack,
                           Thread* self ATTRIBUTE_UNUSED) {
  DCHECK(kUseReadBarrier);
  if (kUseBakerReadBarrier) {
    DCHECK(handle_on_stack->AsMirrorPtr() != nullptr)
        << "The class of a static jni call must not be null";
    // Check the mark bit and return early if it's already marked.
    if (LIKELY(handle_on_stack->AsMirrorPtr()->GetMarkBit() != 0)) {
      return;
    }
  }
  // Call the read barrier and update the handle.
  mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack);
  handle_on_stack->Assign(to_ref);
}

// Called on entry to fast JNI, push a new local reference table only.
extern uint32_t JniMethodFastStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
  env->local_ref_cookie = env->locals.GetSegmentState();

  if (kIsDebugBuild) {
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
  }

  return saved_local_ref_cookie;
}

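// Usage sketch: a @FastNative stub is assumed to bracket the native call roughly as follows
// (the locals and the call to the native implementation are illustrative, not lifted from the
// generated stub):
//
//   uint32_t cookie = JniMethodFastStart(self);   // save the local-ref segment state
//   jvalue ret = CallNativeImplementation(...);   // thread stays Runnable throughout
//   JniMethodFastEnd(cookie, self);               // suspend check, then pop local references
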
// Called on entry to JNI, transition out of Runnable and release share of mutator_lock_.
extern uint32_t JniMethodStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = bit_cast<uint32_t>(env->local_ref_cookie);
  env->local_ref_cookie = env->locals.GetSegmentState();
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  // TODO: Introduce special entrypoint for synchronized @FastNative methods?
  //       Or ban synchronized @FastNative outright to avoid the extra check here?
  DCHECK(!native_method->IsFastNative() || native_method->IsSynchronized());
  if (!native_method->IsFastNative()) {
    // When not fast JNI we transition out of runnable.
    self->TransitionFromRunnableToSuspended(kNative);
  }
  return saved_local_ref_cookie;
}

extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) {
  self->DecodeJObject(to_lock)->MonitorEnter(self);
  return JniMethodStart(self);
}

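// Usage sketch for a normal (non-@FastNative, non-@CriticalNative) native method; the stub
// shape is illustrative:
//
//   uint32_t cookie = JniMethodStart(self);  // or JniMethodStartSynchronized(obj, self)
//   // ... thread is in kNative here; native code runs without a share of the mutator lock ...
//   JniMethodEnd(cookie, self);              // or JniMethodEndSynchronized(cookie, obj, self)
//
// Note that JniMethodStartSynchronized() enters the monitor while the thread is still Runnable;
// the transition to kNative (if any) only happens afterwards, inside JniMethodStart().
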
// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  bool is_fast = native_method->IsFastNative();
  if (!is_fast) {
    self->TransitionFromSuspendedToRunnable();
  } else {
    GoToRunnableFast(self);
  }
}

ALWAYS_INLINE static inline void GoToRunnableFast(Thread* self) {
  if (kIsDebugBuild) {
    // Should only enter here if the method is @FastNative.
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsFastNative()) << native_method->PrettyMethod();
  }

  // When we are in @FastNative, we are already Runnable.
  // Only do a suspend check on the way out of JNI.
  if (UNLIKELY(self->TestAllFlags())) {
    // In fast JNI mode we never transitioned out of runnable. Perform a suspend check if there
    // is a flag raised.
    DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
    self->CheckSuspend();
  }
}

static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JNIEnvExt* env = self->GetJniEnv();
  if (UNLIKELY(env->check_jni)) {
    env->CheckNoHeldMonitors();
  }
  env->locals.SetSegmentState(env->local_ref_cookie);
  env->local_ref_cookie = bit_cast<IRTSegmentState>(saved_local_ref_cookie);
  self->PopHandleScope();
}

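// The segment-state cookie is what makes local references method-scoped: SetSegmentState()
// resets the local reference table to where it was when the matching Start entrypoint ran,
// freeing every local reference the native method created, and the saved cookie then restores
// the caller's scope. Illustrative walk-through (the numeric values are made up):
//
//   on entry: segment state = 7, env->local_ref_cookie = 3   -> Start saves 3, cookie := 7
//   native code creates local references 7..11                -> segment state = 12
//   on exit:  SetSegmentState(7) frees entries 7..11, env->local_ref_cookie := 3 again
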
// TODO: These should probably be templatized or macro-ized.
// Otherwise there's just too much repetitive boilerplate.

extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnable(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodFastEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnableFast(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie,
                                     jobject locked,
                                     Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
  PopLocalReferences(saved_local_ref_cookie, self);
}

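// Each End entrypoint is expected to be handed the cookie returned by its matching Start
// entrypoint: JniMethodFastEnd pairs with JniMethodFastStart, JniMethodEndSynchronized with
// JniMethodStartSynchronized, and so on. In the synchronized variants the unlock happens before
// PopLocalReferences() because the locked jobject still has to be decodable.
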
// Common result handling for EndWithReference.
static mirror::Object* JniMethodEndWithReferenceHandleResult(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             Thread* self)
    NO_THREAD_SAFETY_ANALYSIS {
  // Must decode before pop. The 'result' may not be valid in case of an exception, though.
  ObjPtr<mirror::Object> o;
  if (!self->IsExceptionPending()) {
    o = self->DecodeJObject(result);
  }
  PopLocalReferences(saved_local_ref_cookie, self);
  // Process result.
  if (UNLIKELY(self->GetJniEnv()->check_jni)) {
    // CheckReferenceResult can resolve types.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
    CheckReferenceResult(h_obj, self);
  }
  VerifyObject(o);
  return o.Ptr();
}

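// The jobject result is decoded while the local reference backing it is still live, i.e. before
// PopLocalReferences() resets the segment state; afterwards only the decoded ObjPtr survives.
// Under CheckJNI the result is wrapped in a handle scope because CheckReferenceResult() can
// resolve types, so the decoded object has to stay visible to the GC across that call.
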
extern mirror::Object* JniMethodFastEndWithReference(jobject result,
                                                     uint32_t saved_local_ref_cookie,
                                                     Thread* self) {
  GoToRunnableFast(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReference(jobject result,
                                                 uint32_t saved_local_ref_cookie,
                                                 Thread* self) {
  GoToRunnable(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             jobject locked,
                                                             Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern uint64_t GenericJniMethodEnd(Thread* self,
                                    uint32_t saved_local_ref_cookie,
                                    jvalue result,
                                    uint64_t result_f,
                                    ArtMethod* called,
                                    HandleScope* handle_scope)
    // TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS
    NO_THREAD_SAFETY_ANALYSIS {
  bool critical_native = called->IsCriticalNative();
  bool fast_native = called->IsFastNative();
  bool normal_native = !critical_native && !fast_native;

  // @FastNative and @CriticalNative do not do a state transition.
  if (LIKELY(normal_native)) {
    GoToRunnable(self);
  }
  // We need the mutator lock (i.e., calling GoToRunnable()) before accessing the shorty or the
  // locked object.
  jobject locked = called->IsSynchronized() ? handle_scope->GetHandle(0).ToJObject() : nullptr;
  char return_shorty_char = called->GetShorty()[0];
  if (return_shorty_char == 'L') {
    if (locked != nullptr) {
      DCHECK(normal_native) << " synchronized @FastNative is not supported";
      UnlockJniSynchronizedMethod(locked, self);
    }
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceHandleResult(
        result.l, saved_local_ref_cookie, self));
  } else {
    if (locked != nullptr) {
      DCHECK(normal_native) << " synchronized @FastNative is not supported";
      UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
    }
    if (LIKELY(!critical_native)) {
      PopLocalReferences(saved_local_ref_cookie, self);
    }
    switch (return_shorty_char) {
      case 'F': {
        if (kRuntimeISA == InstructionSet::kX86) {
          // Convert back the result to float.
          double d = bit_cast<double, uint64_t>(result_f);
          return bit_cast<uint32_t, float>(static_cast<float>(d));
        } else {
          return result_f;
        }
      }
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

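// GenericJniMethodEnd packs every result into the uint64_t expected by the generic JNI
// trampoline. Worked example for the x86 'F' case (values are illustrative): result_f arrives
// holding the bit pattern of the double 2.5 (0x4004000000000000), presumably because the 32-bit
// x86 ABI hands the float result back widened; bit_cast<double, uint64_t> recovers 2.5, the
// static_cast narrows it to 2.5f, and bit_cast<uint32_t, float> yields 0x40200000, the raw
// float bits the caller stores as the return value. On other ISAs the low 32 bits of result_f
// are assumed to already hold the raw float bits, so result_f is returned unchanged.
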
}  // namespace art