blob: 7ce68c624b8315a86a813e325e752ad9ff5c61cc [file] [log] [blame]
Ian Rogers00f7d0e2012-07-19 15:28:27 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
18#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
Ian Rogers00f7d0e2012-07-19 15:28:27 -070019
Elliott Hughes1aa246d2012-12-13 09:29:36 -080020#include "base/casts.h"
Ian Rogers576ca0c2014-06-06 15:58:22 -070021#include "jni_internal-inl.h"
Ian Rogers693ff612013-02-01 10:56:12 -080022#include "thread-inl.h"
Mathieu Chartier4e305412014-02-19 10:54:44 -080023#include "verify_object.h"
Ian Rogers00f7d0e2012-07-19 15:28:27 -070024
25namespace art {
26
27// Scoped change into and out of a particular state. Handles Runnable transitions that require
28// more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070029// ScopedObjectAccess are used to handle the change into Runnable to Get direct access to objects,
Ian Rogers00f7d0e2012-07-19 15:28:27 -070030// the unchecked variant doesn't aid annotalysis.
31class ScopedThreadStateChange {
32 public:
33 ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
Ian Rogers1ffa32f2013-02-05 18:29:08 -080034 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
Ian Rogers00f7d0e2012-07-19 15:28:27 -070035 : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
Ian Rogersc0fa3ad2013-02-05 00:11:55 -080036 if (UNLIKELY(self_ == NULL)) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -070037 // Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
38 old_thread_state_ = kTerminated;
Ian Rogers120f1c72012-09-28 17:17:10 -070039 Runtime* runtime = Runtime::Current();
Mathieu Chartier590fee92013-09-13 13:46:47 -070040 CHECK(runtime == NULL || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
Ian Rogers00f7d0e2012-07-19 15:28:27 -070041 } else {
Ian Rogers22f454c2012-09-08 11:06:29 -070042 DCHECK_EQ(self, Thread::Current());
43 // Read state without locks, ok as state is effectively thread local and we're not interested
44 // in the suspend count (this will be handled in the runnable transitions).
Ian Rogers474b6da2012-09-25 00:20:38 -070045 old_thread_state_ = self->GetState();
Mathieu Chartier92b78892014-04-24 16:14:43 -070046 if (old_thread_state_ != new_thread_state) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -070047 if (new_thread_state == kRunnable) {
48 self_->TransitionFromSuspendedToRunnable();
Mathieu Chartier92b78892014-04-24 16:14:43 -070049 } else if (old_thread_state_ == kRunnable) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -070050 self_->TransitionFromRunnableToSuspended(new_thread_state);
Mathieu Chartier92b78892014-04-24 16:14:43 -070051 } else {
52 // A suspended transition to another effectively suspended transition, ok to use Unsafe.
53 self_->SetState(new_thread_state);
Ian Rogers00f7d0e2012-07-19 15:28:27 -070054 }
55 }
56 }
57 }
58
Ian Rogers1ffa32f2013-02-05 18:29:08 -080059 ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
Ian Rogersc0fa3ad2013-02-05 00:11:55 -080060 if (UNLIKELY(self_ == NULL)) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -070061 if (!expected_has_no_thread_) {
Ian Rogers120f1c72012-09-28 17:17:10 -070062 Runtime* runtime = Runtime::Current();
Mathieu Chartier590fee92013-09-13 13:46:47 -070063 bool shutting_down = (runtime == NULL) || runtime->IsShuttingDown(nullptr);
Ian Rogers120f1c72012-09-28 17:17:10 -070064 CHECK(shutting_down);
Ian Rogers00f7d0e2012-07-19 15:28:27 -070065 }
66 } else {
67 if (old_thread_state_ != thread_state_) {
68 if (old_thread_state_ == kRunnable) {
69 self_->TransitionFromSuspendedToRunnable();
70 } else if (thread_state_ == kRunnable) {
71 self_->TransitionFromRunnableToSuspended(old_thread_state_);
72 } else {
Ian Rogers22f454c2012-09-08 11:06:29 -070073 // A suspended transition to another effectively suspended transition, ok to use Unsafe.
Ian Rogers474b6da2012-09-25 00:20:38 -070074 self_->SetState(old_thread_state_);
Ian Rogers00f7d0e2012-07-19 15:28:27 -070075 }
76 }
77 }
78 }
79
80 Thread* Self() const {
81 return self_;
82 }
83
84 protected:
85 // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
86 ScopedThreadStateChange()
87 : self_(NULL), thread_state_(kTerminated), old_thread_state_(kTerminated),
88 expected_has_no_thread_(true) {}
89
90 Thread* const self_;
91 const ThreadState thread_state_;
92
93 private:
94 ThreadState old_thread_state_;
95 const bool expected_has_no_thread_;
96
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -070097 friend class ScopedObjectAccessUnchecked;
Ian Rogers00f7d0e2012-07-19 15:28:27 -070098 DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
99};
100
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700101// Assumes we are already runnable.
102class ScopedObjectAccessAlreadyRunnable {
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700103 public:
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700104 Thread* Self() const {
105 return self_;
Ian Rogersc0fa3ad2013-02-05 00:11:55 -0800106 }
107
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700108 JNIEnvExt* Env() const {
109 return env_;
110 }
111
112 JavaVMExt* Vm() const {
113 return vm_;
114 }
115
116 /*
117 * Add a local reference for an object to the indirect reference table associated with the
118 * current stack frame. When the native function returns, the reference will be discarded.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700119 *
Elliott Hughes9dcd45c2013-07-29 14:40:52 -0700120 * We need to allow the same reference to be added multiple times, and cope with NULL.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700121 *
Elliott Hughes9dcd45c2013-07-29 14:40:52 -0700122 * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700123 * it's best if we don't grab a mutex.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700124 */
125 template<typename T>
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800126 T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers1eb512d2013-10-18 15:42:20 -0700127 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700128 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700129 if (obj == NULL) {
130 return NULL;
131 }
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700132 DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
Ian Rogers987560f2014-04-22 11:42:59 -0700133 return Env()->AddLocalReference<T>(obj);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700134 }
135
136 template<typename T>
137 T Decode(jobject obj) const
Ian Rogersb726dcb2012-09-05 08:57:23 -0700138 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700139 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700140 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700141 return down_cast<T>(Self()->DecodeJObject(obj));
142 }
143
Brian Carlstromea46f952013-07-30 01:26:50 -0700144 mirror::ArtField* DecodeField(jfieldID fid) const
Ian Rogersb726dcb2012-09-05 08:57:23 -0700145 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700146 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700147 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartier590fee92013-09-13 13:46:47 -0700148 CHECK(!kMovingFields);
Brian Carlstromea46f952013-07-30 01:26:50 -0700149 return reinterpret_cast<mirror::ArtField*>(fid);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700150 }
151
Brian Carlstromea46f952013-07-30 01:26:50 -0700152 jfieldID EncodeField(mirror::ArtField* field) const
Ian Rogersb726dcb2012-09-05 08:57:23 -0700153 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700154 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700155 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartier590fee92013-09-13 13:46:47 -0700156 CHECK(!kMovingFields);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700157 return reinterpret_cast<jfieldID>(field);
158 }
159
Brian Carlstromea46f952013-07-30 01:26:50 -0700160 mirror::ArtMethod* DecodeMethod(jmethodID mid) const
Ian Rogersb726dcb2012-09-05 08:57:23 -0700161 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700162 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700163 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartier590fee92013-09-13 13:46:47 -0700164 CHECK(!kMovingMethods);
Brian Carlstromea46f952013-07-30 01:26:50 -0700165 return reinterpret_cast<mirror::ArtMethod*>(mid);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700166 }
167
Brian Carlstromea46f952013-07-30 01:26:50 -0700168 jmethodID EncodeMethod(mirror::ArtMethod* method) const
Ian Rogersb726dcb2012-09-05 08:57:23 -0700169 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700170 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700171 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartier590fee92013-09-13 13:46:47 -0700172 CHECK(!kMovingMethods);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700173 return reinterpret_cast<jmethodID>(method);
174 }
175
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700176 bool IsRunnable() const {
177 return self_->GetState() == kRunnable;
178 }
179
180 protected:
181 explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
182 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
183 : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
184 }
185
186 explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
187 LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
188 : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
189 vm_(env_ != nullptr ? env_->vm : nullptr) {
190 }
191
192 // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
193 // change into Runnable or acquire a share on the mutator_lock_.
194 explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
195 : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}
196
197 // Here purely to force inlining.
198 ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
199 }
200
201 // Self thread, can be null.
202 Thread* const self_;
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700203 // The full JNIEnv.
204 JNIEnvExt* const env_;
205 // The full JavaVM.
206 JavaVMExt* const vm_;
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700207};
208
// Entry/exit processing for transitions from Native to Runnable (ie within JNI functions).
//
// This class performs the necessary thread state switching to and from Runnable and lets us
// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
// is also manipulating the Object.
//
// The destructor transitions back to the previous thread state, typically Native. In this state
// GC and thread suspension may occur.
//
// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared lock on
// the mutator_lock_ will be acquired on construction.
class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
 public:
  // Resolves the thread from |env|, then tsc_ transitions it to kRunnable; the stack and
  // mutator-lock checks below therefore run while already Runnable.
  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
    Self()->VerifyStack();
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  // Same as above for a caller that already has the Thread* in hand.
  explicit ScopedObjectAccessUnchecked(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
    Self()->VerifyStack();
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}

 private:
  // The scoped thread state change makes sure that we are runnable and restores the thread state
  // in the destructor.
  const ScopedThreadStateChange tsc_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};
251
// Annotalysis helping variant of the above: identical behavior, but the SHARED_LOCK_FUNCTION /
// UNLOCK_FUNCTION annotations let the static analysis see the mutator_lock_ share acquisition.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  explicit ScopedObjectAccess(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(env) {
  }

  explicit ScopedObjectAccess(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(self) {
  }

  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
    // Base class will release share of lock. Invoked after this destructor.
  }

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  // routines operating with just a VM are sound, they are not, but when you have just a VM
  // you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};
282
283} // namespace art
284
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700285#endif // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_