/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_H_

#include "array.h"
#include "art_field.h"
#include "class.h"
#include "dex_file_types.h"
#include "object.h"
#include "object_array.h"

namespace art {

class ArtMethod;
struct DexCacheOffsets;
class DexFile;
class ImageWriter;
union JValue;
class LinearAlloc;
class Thread;

namespace mirror {

class MethodType;
class String;

template <typename T> struct PACKED(8) DexCachePair {
  GcRoot<T> object;
  uint32_t index;
  // The array is initially [ {0,0}, {0,0}, {0,0} ... ].
  // We maintain the invariant that once a dex cache entry is populated,
  // the pointer is always non-0.
  // Any given entry would thus be:
  //   {non-0, non-0} OR {0,0}
  //
  // For a lookup index > 0 it is therefore sufficient to check whether the
  // lookup index matches the stored index: if it does, the pointer is also
  // non-null.
  //
  // The 0th entry is a special case: its value is either {0,0} (initial
  // state) or {non-0, 0}, the latter indicating that a valid object is
  // stored at that slot for a dex section id of 0.
  //
  // As an optimization, we want to avoid branching on the object pointer,
  // since it is always non-null whenever the index check succeeds (except
  // for the 0th index). We therefore set the initial state of the 0th entry
  // to {0,1}, which is guaranteed to fail the "lookup index == stored index"
  // check. (See the illustrative sketch following this struct.)
  DexCachePair(T* object, uint32_t index)
      : object(object),
        index(index) {}
  DexCachePair() = default;
  DexCachePair(const DexCachePair<T>&) = default;
  DexCachePair& operator=(const DexCachePair<T>&) = default;

  static void Initialize(std::atomic<DexCachePair<T>>* dex_cache) {
    DexCachePair<T> first_elem;
    first_elem.object = GcRoot<T>(nullptr);
    first_elem.index = InvalidIndexForSlot(0);
    dex_cache[0].store(first_elem, std::memory_order_relaxed);
  }

  static GcRoot<T> Lookup(std::atomic<DexCachePair<T>>* dex_cache,
                          uint32_t idx,
                          uint32_t cache_size) {
    DCHECK_NE(cache_size, 0u);
    DexCachePair<T> element = dex_cache[idx % cache_size].load(std::memory_order_relaxed);
    if (idx != element.index) {
      return GcRoot<T>(nullptr);
    }

    DCHECK(!element.object.IsNull());
    return element.object;
  }

  static void Assign(std::atomic<DexCachePair<T>>* dex_cache,
                     uint32_t idx,
                     T* object,
                     uint32_t cache_size) {
    DCHECK_LT(idx % cache_size, cache_size);
    dex_cache[idx % cache_size].store(
        DexCachePair<T>(object, idx), std::memory_order_relaxed);
  }

  static uint32_t InvalidIndexForSlot(uint32_t slot) {
    // Since the cache size is a power of two, 0 will always map to slot 0.
    // Use 1 for slot 0 and 0 for all other slots.
    return (slot == 0) ? 1u : 0u;
  }
};
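
// An illustrative sketch of the lookup scheme described in DexCachePair above,
// assuming a hypothetical four-slot cache backed by zero-filled memory (as the
// runtime's linear allocator provides) and some String* `str`. Illustration
// only, not runtime code; the real caches use the sizes defined in DexCache.
//
//   std::atomic<DexCachePair<String>> cache[4];         // Assume zero-filled.
//   DexCachePair<String>::Initialize(cache);            // Slot 0 becomes {0,1}.
//   DexCachePair<String>::Lookup(cache, 0u, 4u);        // Miss: stored index 1 != 0.
//   DexCachePair<String>::Assign(cache, 6u, str, 4u);   // Stored in slot 6 % 4 == 2.
//   DexCachePair<String>::Lookup(cache, 6u, 4u);        // Hit: returns GcRoot<String>(str).
//   DexCachePair<String>::Lookup(cache, 2u, 4u);        // Miss: slot 2 stores index 6, not 2.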

using StringDexCachePair = DexCachePair<mirror::String>;
using StringDexCacheType = std::atomic<StringDexCachePair>;

using MethodTypeDexCachePair = DexCachePair<mirror::MethodType>;
using MethodTypeDexCacheType = std::atomic<MethodTypeDexCachePair>;

// C++ mirror of java.lang.DexCache.
class MANAGED DexCache FINAL : public Object {
 public:
  // Size of java.lang.DexCache.class.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of string dex cache. Needs to be a power of 2 for entrypoint assumptions to hold.
  static constexpr size_t kDexCacheStringCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheStringCacheSize),
                "String dex cache size is not a power of 2.");

  // Size of method type dex cache. Needs to be a power of 2 for entrypoint assumptions
  // to hold.
  static constexpr size_t kDexCacheMethodTypeCacheSize = 1024;
  static_assert(IsPowerOfTwo(kDexCacheMethodTypeCacheSize),
                "MethodType dex cache size is not a power of 2.");
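
  // Worked example of the power-of-2 requirement above (illustration only, not
  // runtime code): because kDexCacheStringCacheSize is a power of 2, the modulo
  // in DexCachePair::Lookup/Assign can be computed with a simple bit mask,
  // which is the sort of assumption the entrypoints rely on. For any uint32_t idx:
  //   idx % kDexCacheStringCacheSize == (idx & (kDexCacheStringCacheSize - 1u))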

  static constexpr size_t StaticStringSize() {
    return kDexCacheStringCacheSize;
  }

  static constexpr size_t StaticMethodTypeSize() {
    return kDexCacheMethodTypeCacheSize;
  }

  // Size of an instance of java.lang.DexCache not including referenced values.
  static constexpr uint32_t InstanceSize() {
    return sizeof(DexCache);
  }

  static void InitializeDexCache(Thread* self,
                                 ObjPtr<mirror::DexCache> dex_cache,
                                 ObjPtr<mirror::String> location,
                                 const DexFile* dex_file,
                                 LinearAlloc* linear_alloc,
                                 PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(Locks::dex_lock_);

  void Fixup(ArtMethod* trampoline, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupStrings(StringDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedTypes(GcRoot<mirror::Class>* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  void FixupResolvedMethodTypes(MethodTypeDexCacheType* dest, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  String* GetLocation() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldObject<String>(OFFSET_OF_OBJECT_MEMBER(DexCache, location_));
  }

  static MemberOffset DexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, dex_);
  }

  static MemberOffset StringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, strings_);
  }

  static MemberOffset ResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_types_);
  }

  static MemberOffset ResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_fields_);
  }

  static MemberOffset ResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_methods_);
  }

  static MemberOffset ResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, resolved_method_types_);
  }

  static MemberOffset NumStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_strings_);
  }

  static MemberOffset NumResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_types_);
  }

  static MemberOffset NumResolvedFieldsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_fields_);
  }

  static MemberOffset NumResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_methods_);
  }

  static MemberOffset NumResolvedMethodTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(DexCache, num_resolved_method_types_);
  }

  mirror::String* GetResolvedString(dex::StringIndex string_idx) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedString(dex::StringIndex string_idx, ObjPtr<mirror::String> resolved) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Clear the cached string at string_idx. Used to undo string intern transactions,
  // so that the string is not kept live by the dex cache.
  void ClearString(dex::StringIndex string_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  Class* GetResolvedType(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetResolvedMethod(uint32_t method_idx,
                                       ArtMethod* resolved,
                                       PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE ArtField* GetResolvedField(uint32_t idx, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pointer sized variant, used for patching.
  ALWAYS_INLINE void SetResolvedField(uint32_t idx, ArtField* field, PointerSize ptr_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  MethodType* GetResolvedMethodType(uint32_t proto_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  void SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved)
      REQUIRES_SHARED(Locks::mutator_lock_);

  StringDexCacheType* GetStrings() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<StringDexCacheType*>(StringsOffset());
  }

  void SetStrings(StringDexCacheType* strings) ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(StringsOffset(), strings);
  }

  GcRoot<Class>* GetResolvedTypes() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<GcRoot<Class>*>(ResolvedTypesOffset());
  }

  void SetResolvedTypes(GcRoot<Class>* resolved_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedTypesOffset(), resolved_types);
  }

  ArtMethod** GetResolvedMethods() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<ArtMethod**>(ResolvedMethodsOffset());
  }

  void SetResolvedMethods(ArtMethod** resolved_methods)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodsOffset(), resolved_methods);
  }

  ArtField** GetResolvedFields() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<ArtField**>(ResolvedFieldsOffset());
  }

  void SetResolvedFields(ArtField** resolved_fields)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedFieldsOffset(), resolved_fields);
  }

  MethodTypeDexCacheType* GetResolvedMethodTypes()
      ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr64<MethodTypeDexCacheType*>(ResolvedMethodTypesOffset());
  }

  void SetResolvedMethodTypes(MethodTypeDexCacheType* resolved_method_types)
      ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(ResolvedMethodTypesOffset(), resolved_method_types);
  }

  size_t NumStrings() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumStringsOffset());
  }

  size_t NumResolvedTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedTypesOffset());
  }

  size_t NumResolvedMethods() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodsOffset());
  }

  size_t NumResolvedFields() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedFieldsOffset());
  }

  size_t NumResolvedMethodTypes() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32(NumResolvedMethodTypesOffset());
  }

  const DexFile* GetDexFile() ALWAYS_INLINE REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtr<const DexFile*>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_));
  }

  void SetDexFile(const DexFile* dex_file) REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtr<false>(OFFSET_OF_OBJECT_MEMBER(DexCache, dex_file_), dex_file);
  }

  void SetLocation(ObjPtr<mirror::String> location) REQUIRES_SHARED(Locks::mutator_lock_);

  // NOTE: Get/SetElementPtrSize() are intended for working with ArtMethod** and ArtField**
  // provided by GetResolvedMethods/Fields() and ArtMethod::GetDexCacheResolvedMethods(),
  // so they need to be public.

  template <typename PtrType>
  static PtrType GetElementPtrSize(PtrType* ptr_array, size_t idx, PointerSize ptr_size);

  template <typename PtrType>
  static void SetElementPtrSize(PtrType* ptr_array, size_t idx, PtrType ptr, PointerSize ptr_size);
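
  // Illustrative use of Get/SetElementPtrSize (assumptions for illustration
  // only: `cache` is a DexCache* with a populated resolved-methods array,
  // `method` is an ArtMethod*, and `size` is the image pointer size):
  //
  //   ArtMethod** methods = cache->GetResolvedMethods();
  //   DexCache::SetElementPtrSize(methods, /* idx */ 3u, method, size);
  //   ArtMethod* same = DexCache::GetElementPtrSize(methods, /* idx */ 3u, size);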

 private:
  void Init(const DexFile* dex_file,
            ObjPtr<String> location,
            StringDexCacheType* strings,
            uint32_t num_strings,
            GcRoot<Class>* resolved_types,
            uint32_t num_resolved_types,
            ArtMethod** resolved_methods,
            uint32_t num_resolved_methods,
            ArtField** resolved_fields,
            uint32_t num_resolved_fields,
            MethodTypeDexCacheType* resolved_methodtypes,
            uint32_t num_resolved_methodtypes,
            PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Visit instance fields of the dex cache as well as its associated arrays.
  template <bool kVisitNativeRoots,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor>
  void VisitReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);

  HeapReference<Object> dex_;
  HeapReference<String> location_;
  uint64_t dex_file_;               // const DexFile*
  uint64_t resolved_fields_;        // ArtField*, array with num_resolved_fields_ elements.
  uint64_t resolved_method_types_;  // std::atomic<MethodTypeDexCachePair>* array with
                                    // num_resolved_method_types_ elements.
  uint64_t resolved_methods_;       // ArtMethod*, array with num_resolved_methods_ elements.
  uint64_t resolved_types_;         // GcRoot<Class>*, array with num_resolved_types_ elements.
  uint64_t strings_;                // std::atomic<StringDexCachePair>*, array with num_strings_
                                    // elements.

  uint32_t num_resolved_fields_;        // Number of elements in the resolved_fields_ array.
  uint32_t num_resolved_method_types_;  // Number of elements in the resolved_method_types_ array.
  uint32_t num_resolved_methods_;       // Number of elements in the resolved_methods_ array.
  uint32_t num_resolved_types_;         // Number of elements in the resolved_types_ array.
  uint32_t num_strings_;                // Number of elements in the strings_ array.

  friend struct art::DexCacheOffsets;  // for verifying offset information
  friend class Object;  // For VisitReferences
  DISALLOW_IMPLICIT_CONSTRUCTORS(DexCache);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_H_