/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include "base/enums.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "handle.h"
#include "stack_reference.h"
#include "verify_object.h"

namespace art {
namespace mirror {
class Object;
}  // namespace mirror

class Thread;

// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that these handles (and the objects they reference) remain visible to the GC as
// roots. It is most common to stack allocate HandleScopes using StackHandleScope.
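//
// Illustrative sketch of the common pattern (hypothetical caller code; `self` is assumed to be
// a valid Thread* and `raw_klass` a raw mirror::Class*):
//
//   StackHandleScope<1> hs(self);
//   Handle<mirror::Class> klass(hs.NewHandle(raw_klass));
//   // Code that may suspend or trigger GC can run here; klass.Get() remains valid because
//   // the scope's reference storage is visited as a GC root.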
class PACKED(4) HandleScope {
 public:
  ~HandleScope() {}

  // Number of references contained within this handle scope.
  uint32_t NumberOfReferences() const {
    return number_of_references_;
  }

  // The following helpers come in versions with and without an explicit pointer size. The
  // versions without it are used at runtime, where the native pointer size applies (and
  // OFFSETOF_MEMBER computes the right offsets automatically). The versions that take the
  // pointer size explicitly allow correct values to be computed at compile time when
  // cross-compiling.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles, given the pointer size.
  static size_t SizeOf(PointerSize pointer_size, uint32_t num_references);

  // Link to previous HandleScope or null.
  HandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of the number of references field within the handle scope, used by generated code.
  static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of the references array within the handle scope, used by generated code.
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return NumberOfReferencesOffset(pointer_size) + sizeof(number_of_references_);
  }
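
  // Resulting in-memory layout, sketched here for a pointer size P (this follows directly from
  // the offset helpers above and is illustrative only):
  //
  //   [0, P)        link_                  (HandleScope*)
  //   [P, P + 4)    number_of_references_  (uint32_t)
  //   [P + 4, ...)  references_            (StackReference<mirror::Object>[number_of_references_])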

  // Placement new creation.
  static HandleScope* Create(void* storage, HandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) :
      link_(nullptr), number_of_references_(number_of_references) {
  }

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(HandleScope* link, uint32_t num_references) :
      link_(link), number_of_references_(num_references) {
  }

  // Linked list of handle scopes. The root is held by a Thread.
  HandleScope* const link_;

  // Number of handlerized references.
  const uint32_t number_of_references_;

  // Storage for references is allocated contiguously after this header:
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// A wrapper around a raw T** whose destructor writes the handle's current object pointer back
// through the wrapped T**.
// TODO: Add more functionality.
template<class T>
class HandleWrapper : public MutableHandle<T> {
 public:
  HandleWrapper(T** obj, const MutableHandle<T>& handle)
     : MutableHandle<T>(handle), obj_(obj) {
  }

  HandleWrapper(const HandleWrapper&) = default;

  ~HandleWrapper() {
    *obj_ = MutableHandle<T>::Get();
  }

 private:
  T** const obj_;
};
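
// Illustrative sketch of how a HandleWrapper is typically used (hypothetical helper; `self`
// and `obj` are assumptions). The raw pointer is rewritten when the wrapper is destroyed, so a
// caller that holds only a mirror::Object** sees the updated address after a moving GC:
//
//   void EnsureInitialized(Thread* self, mirror::Object** obj) {
//     StackHandleScope<1> hs(self);
//     HandleWrapper<mirror::Object> h(hs.NewHandleWrapper(obj));
//     // ... code that may suspend or allocate ...
//   }  // ~HandleWrapper writes h.Get() back into *obj.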

// Scoped handle storage of a fixed size that is usually stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public HandleScope {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~StackHandleScope();

  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* Self() const {
    return self_;
  }

 private:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // The thread on whose handle scope chain this scope is linked. The stack handle scope will
  // push and pop itself from this thread's list.
  Thread* const self_;

  // Position at which the next handle will be created.
  size_t pos_;

  template<size_t kNumRefs> friend class StackHandleScope;
};

// Utility class to manage a collection (stack) of StackHandleScopes. All the managed handle
// scopes have the same fixed size.
// Calls to NewHandle will create a new handle inside the top StackHandleScope.
// When that handle scope becomes full, a new one is created and pushed on top of the
// previous one.
//
// NB:
// - it is not safe to intermix use of the *same* StackHandleScopeCollection with other
//   StackHandleScopes.
// - this is an easy way to avoid implementing a full ZoneHandleScope to manage an
//   arbitrary number of handles.
class StackHandleScopeCollection {
 public:
  explicit StackHandleScopeCollection(Thread* const self) :
      self_(self),
      current_scope_num_refs_(0) {
  }

  ~StackHandleScopeCollection() {
    while (!scopes_.empty()) {
      delete scopes_.top();
      scopes_.pop();
    }
  }

  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (scopes_.empty() || current_scope_num_refs_ >= kNumReferencesPerScope) {
      StackHandleScope<kNumReferencesPerScope>* scope =
          new StackHandleScope<kNumReferencesPerScope>(self_);
      scopes_.push(scope);
      current_scope_num_refs_ = 0;
    }
    current_scope_num_refs_++;
    return scopes_.top()->NewHandle(object);
  }

 private:
  static constexpr size_t kNumReferencesPerScope = 4;

  Thread* const self_;

  std::stack<StackHandleScope<kNumReferencesPerScope>*> scopes_;
  size_t current_scope_num_refs_;

  DISALLOW_COPY_AND_ASSIGN(StackHandleScopeCollection);
};
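
// Illustrative sketch of using the collection when the number of handles is not known up front
// (hypothetical caller; `self` and the `fields` container are assumptions):
//
//   StackHandleScopeCollection handles(self);
//   std::vector<Handle<mirror::Object>> roots;
//   for (mirror::Object* field : fields) {
//     roots.push_back(handles.NewHandle(field));  // A new scope is pushed every 4 handles.
//   }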

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_