blob: 1ca6c4e4fae86a8e95953a64699d603df5b5c872 [file] [log] [blame]
Ian Rogers00f7d0e2012-07-19 15:28:27 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
18#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
Ian Rogers00f7d0e2012-07-19 15:28:27 -070019
Elliott Hughes1aa246d2012-12-13 09:29:36 -080020#include "base/casts.h"
Ian Rogers693ff612013-02-01 10:56:12 -080021#include "thread-inl.h"
Ian Rogers00f7d0e2012-07-19 15:28:27 -070022
23namespace art {
24
25// Scoped change into and out of a particular state. Handles Runnable transitions that require
26// more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
27// ScopedObjectAccess are used to handle the change into Runnable to get direct access to objects,
28// the unchecked variant doesn't aid annotalysis.
29class ScopedThreadStateChange {
30 public:
31 ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
Ian Rogers1ffa32f2013-02-05 18:29:08 -080032 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
Ian Rogers00f7d0e2012-07-19 15:28:27 -070033 : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
Ian Rogersc0fa3ad2013-02-05 00:11:55 -080034 if (UNLIKELY(self_ == NULL)) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -070035 // Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
36 old_thread_state_ = kTerminated;
Ian Rogers120f1c72012-09-28 17:17:10 -070037 Runtime* runtime = Runtime::Current();
Mathieu Chartier590fee92013-09-13 13:46:47 -070038 CHECK(runtime == NULL || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
Ian Rogers00f7d0e2012-07-19 15:28:27 -070039 } else {
40 bool runnable_transition;
Ian Rogers22f454c2012-09-08 11:06:29 -070041 DCHECK_EQ(self, Thread::Current());
42 // Read state without locks, ok as state is effectively thread local and we're not interested
43 // in the suspend count (this will be handled in the runnable transitions).
Ian Rogers474b6da2012-09-25 00:20:38 -070044 old_thread_state_ = self->GetState();
Ian Rogers22f454c2012-09-08 11:06:29 -070045 runnable_transition = old_thread_state_ == kRunnable || new_thread_state == kRunnable;
46 if (!runnable_transition) {
47 // A suspended transition to another effectively suspended transition, ok to use Unsafe.
Ian Rogers474b6da2012-09-25 00:20:38 -070048 self_->SetState(new_thread_state);
Ian Rogers00f7d0e2012-07-19 15:28:27 -070049 }
Mathieu Chartier858f1c52012-10-17 17:45:55 -070050
Ian Rogers00f7d0e2012-07-19 15:28:27 -070051 if (runnable_transition && old_thread_state_ != new_thread_state) {
52 if (new_thread_state == kRunnable) {
53 self_->TransitionFromSuspendedToRunnable();
54 } else {
55 DCHECK_EQ(old_thread_state_, kRunnable);
56 self_->TransitionFromRunnableToSuspended(new_thread_state);
57 }
58 }
59 }
60 }
61
Ian Rogers1ffa32f2013-02-05 18:29:08 -080062 ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
Ian Rogersc0fa3ad2013-02-05 00:11:55 -080063 if (UNLIKELY(self_ == NULL)) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -070064 if (!expected_has_no_thread_) {
Ian Rogers120f1c72012-09-28 17:17:10 -070065 Runtime* runtime = Runtime::Current();
Mathieu Chartier590fee92013-09-13 13:46:47 -070066 bool shutting_down = (runtime == NULL) || runtime->IsShuttingDown(nullptr);
Ian Rogers120f1c72012-09-28 17:17:10 -070067 CHECK(shutting_down);
Ian Rogers00f7d0e2012-07-19 15:28:27 -070068 }
69 } else {
70 if (old_thread_state_ != thread_state_) {
71 if (old_thread_state_ == kRunnable) {
72 self_->TransitionFromSuspendedToRunnable();
73 } else if (thread_state_ == kRunnable) {
74 self_->TransitionFromRunnableToSuspended(old_thread_state_);
75 } else {
Ian Rogers22f454c2012-09-08 11:06:29 -070076 // A suspended transition to another effectively suspended transition, ok to use Unsafe.
Ian Rogers474b6da2012-09-25 00:20:38 -070077 self_->SetState(old_thread_state_);
Ian Rogers00f7d0e2012-07-19 15:28:27 -070078 }
79 }
80 }
81 }
82
83 Thread* Self() const {
84 return self_;
85 }
86
87 protected:
88 // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
89 ScopedThreadStateChange()
90 : self_(NULL), thread_state_(kTerminated), old_thread_state_(kTerminated),
91 expected_has_no_thread_(true) {}
92
93 Thread* const self_;
94 const ThreadState thread_state_;
95
96 private:
97 ThreadState old_thread_state_;
98 const bool expected_has_no_thread_;
99
100 DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
101};
102
103// Entry/exit processing for transitions from Native to Runnable (ie within JNI functions).
104//
105// This class performs the necessary thread state switching to and from Runnable and lets us
106// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
107// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
108// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
109// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
110// is also manipulating the Object.
111//
112// The destructor transitions back to the previous thread state, typically Native. In this state
113// GC and thread suspension may occur.
114//
115// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared of
116// the mutator_lock_ will be acquired on construction.
class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
 public:
  // Enters Runnable for the thread owning |env|; also verifies the stack in debug builds.
  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
        env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
    self_->VerifyStack();
  }

  // Enters Runnable for |self|; the JNIEnv (and hence the VM) may be null for this overload.
  explicit ScopedObjectAccessUnchecked(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : ScopedThreadStateChange(self, kRunnable),
        env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
        vm_(env_ != NULL ? env_->vm : NULL) {
    self_->VerifyStack();
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessUnchecked(JavaVM* vm)
      : ScopedThreadStateChange(), env_(NULL), vm_(down_cast<JavaVMExt*>(vm)) {}

  // Here purely to force inlining.
  ~ScopedObjectAccessUnchecked() ALWAYS_INLINE {
  }

  JNIEnvExt* Env() const {
    return env_;
  }

  JavaVMExt* Vm() const {
    return vm_;
  }

  /*
   * Add a local reference for an object to the indirect reference table associated with the
   * current stack frame. When the native function returns, the reference will be discarded.
   *
   * We need to allow the same reference to be added multiple times, and cope with NULL.
   *
   * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
   * it's best if we don't grab a mutex.
   */
  template<typename T>
  T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    if (obj == NULL) {
      return NULL;
    }

    if (kIsDebugBuild) {
      Runtime::Current()->GetHeap()->VerifyObject(obj);
    }

    // Sanity check: 0xebad... is a poison pattern; catch references to freed objects early.
    DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);

    IndirectReferenceTable& locals = Env()->locals;

    uint32_t cookie = Env()->local_ref_cookie;
    IndirectRef ref = locals.Add(cookie, obj);

#if 0  // TODO: fix this to understand PushLocalFrame, so we can turn it on.
    if (Env()->check_jni) {
      size_t entry_count = locals.Capacity();
      if (entry_count > 16) {
        LOG(WARNING) << "Warning: more than 16 JNI local references: "
                     << entry_count << " (most recent was a " << PrettyTypeOf(obj) << ")\n"
                     << Dumpable<IndirectReferenceTable>(locals);
        // TODO: LOG(FATAL) in a later release?
      }
    }
#endif
    if (Vm()->work_around_app_jni_bugs) {
      // Hand out direct pointers to support broken old apps.
      return reinterpret_cast<T>(obj);
    }

    return reinterpret_cast<T>(ref);
  }

  // Converts a JNI reference into the corresponding mirror object pointer.
  template<typename T>
  T Decode(jobject obj) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    return down_cast<T>(Self()->DecodeJObject(obj));
  }

  // jfieldID is the raw ArtField pointer; only valid while fields cannot be moved by GC.
  mirror::ArtField* DecodeField(jfieldID fid) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingFields);
    return reinterpret_cast<mirror::ArtField*>(fid);
  }

  // Inverse of DecodeField: encodes an ArtField pointer as an opaque jfieldID.
  jfieldID EncodeField(mirror::ArtField* field) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingFields);
    return reinterpret_cast<jfieldID>(field);
  }

  // jmethodID is the raw ArtMethod pointer; only valid while methods cannot be moved by GC.
  mirror::ArtMethod* DecodeMethod(jmethodID mid) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingMethods);
    return reinterpret_cast<mirror::ArtMethod*>(mid);
  }

  // Inverse of DecodeMethod: encodes an ArtMethod pointer as an opaque jmethodID.
  jmethodID EncodeMethod(mirror::ArtMethod* method) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    CHECK(!kMovingMethods);
    return reinterpret_cast<jmethodID>(method);
  }

 private:
  // The full JNIEnv.
  JNIEnvExt* const env_;
  // The full JavaVM.
  JavaVMExt* const vm_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};
246
247// Annotalysis helping variant of the above.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  // As ScopedObjectAccessUnchecked(JNIEnv*), but annotated for annotalysis: declares that a
  // shared hold on mutator_lock_ is acquired for the scope (the base class performs the actual
  // transition; we merely assert it happened).
  explicit ScopedObjectAccess(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(env) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  // As ScopedObjectAccessUnchecked(Thread*), with the same annotalysis lock annotations.
  explicit ScopedObjectAccess(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(self) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
    // Base class will release share of lock. Invoked after this destructor.
  }

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  // routines operating with just a VM are sound, they are not, but when you have just a VM
  // you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};
279
280} // namespace art
281
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700282#endif // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_