/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_
#define ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_

#include "dex_cache.h"

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/logging.h"
#include "gc_root.h"
#include "mirror/class.h"
#include "mirror/method_type.h"
#include "runtime.h"
#include "obj_ptr.h"

#include <atomic>

namespace art {
namespace mirror {

inline uint32_t DexCache::ClassSize(PointerSize pointer_size) {
  uint32_t vtable_entries = Object::kVTableLength + 5;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0, 0, 0, pointer_size);
}

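// Strings are cached in a fixed-size array of (index, GcRoot) pairs: a dex string index is
// mapped to a slot by taking it modulo kDexCacheStringCacheSize, so distinct indices can alias
// to, and evict each other from, the same slot.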
inline uint32_t DexCache::StringSlotIndex(dex::StringIndex string_idx) {
  DCHECK_LT(string_idx.index_, GetDexFile()->NumStringIds());
  const uint32_t slot_idx = string_idx.index_ % kDexCacheStringCacheSize;
  DCHECK_LT(slot_idx, NumStrings());
  return slot_idx;
}

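// Returns the cached String for |string_idx|, or null if the slot is empty or currently holds
// an entry for a different (aliasing) index.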
inline String* DexCache::GetResolvedString(dex::StringIndex string_idx) {
  return GetStrings()[StringSlotIndex(string_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(string_idx.index_);
}

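// Publishes |resolved| in the slot for |string_idx|. Under an active AOT transaction the store
// is additionally recorded so that it can be undone if the transaction is rolled back.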
inline void DexCache::SetResolvedString(dex::StringIndex string_idx, ObjPtr<String> resolved) {
  DCHECK(resolved != nullptr);
  GetStrings()[StringSlotIndex(string_idx)].store(
      StringDexCachePair(resolved, string_idx.index_), std::memory_order_relaxed);
  Runtime* const runtime = Runtime::Current();
  if (UNLIKELY(runtime->IsActiveTransaction())) {
    DCHECK(runtime->IsAotCompiler());
    runtime->RecordResolveString(this, string_idx);
  }
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  runtime->GetHeap()->WriteBarrierEveryFieldOf(this);
}

inline void DexCache::ClearString(dex::StringIndex string_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = StringSlotIndex(string_idx);
  StringDexCacheType* slot = &GetStrings()[slot_idx];
  // This is racy but should only be called from the transactional interpreter.
  if (slot->load(std::memory_order_relaxed).index == string_idx.index_) {
    StringDexCachePair cleared(nullptr, StringDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

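// Resolved types (and, below, method types) use the same modulo-slot caching scheme as strings,
// just with their own fixed cache sizes.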
inline uint32_t DexCache::TypeSlotIndex(dex::TypeIndex type_idx) {
  DCHECK_LT(type_idx.index_, GetDexFile()->NumTypeIds());
  const uint32_t slot_idx = type_idx.index_ % kDexCacheTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedTypes());
  return slot_idx;
}

inline Class* DexCache::GetResolvedType(dex::TypeIndex type_idx) {
  // It is theorized that a load acquire is not required since obtaining the resolved class will
  // always have an address dependency or a lock.
  return GetResolvedTypes()[TypeSlotIndex(type_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(type_idx.index_);
}

inline void DexCache::SetResolvedType(dex::TypeIndex type_idx, ObjPtr<Class> resolved) {
  DCHECK(resolved != nullptr);
  // TODO: default transaction support.
  // Use a release store for SetResolvedType. This is done to prevent other threads from seeing a
  // class but not necessarily seeing the loaded members like the static fields array.
  // See b/32075261.
  GetResolvedTypes()[TypeSlotIndex(type_idx)].store(
      TypeDexCachePair(resolved, type_idx.index_), std::memory_order_release);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}

inline void DexCache::ClearResolvedType(dex::TypeIndex type_idx) {
  DCHECK(Runtime::Current()->IsAotCompiler());
  uint32_t slot_idx = TypeSlotIndex(type_idx);
  TypeDexCacheType* slot = &GetResolvedTypes()[slot_idx];
  // This is racy but should only be called from the single-threaded ImageWriter and tests.
  if (slot->load(std::memory_order_relaxed).index == type_idx.index_) {
    TypeDexCachePair cleared(nullptr, TypeDexCachePair::InvalidIndexForSlot(slot_idx));
    slot->store(cleared, std::memory_order_relaxed);
  }
}

inline uint32_t DexCache::MethodTypeSlotIndex(uint32_t proto_idx) {
  DCHECK(Runtime::Current()->IsMethodHandlesEnabled());
  DCHECK_LT(proto_idx, GetDexFile()->NumProtoIds());
  const uint32_t slot_idx = proto_idx % kDexCacheMethodTypeCacheSize;
  DCHECK_LT(slot_idx, NumResolvedMethodTypes());
  return slot_idx;
}

inline MethodType* DexCache::GetResolvedMethodType(uint32_t proto_idx) {
  return GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].load(
      std::memory_order_relaxed).GetObjectForIndex(proto_idx);
}

inline void DexCache::SetResolvedMethodType(uint32_t proto_idx, MethodType* resolved) {
  DCHECK(resolved != nullptr);
  GetResolvedMethodTypes()[MethodTypeSlotIndex(proto_idx)].store(
      MethodTypeDexCachePair(resolved, proto_idx), std::memory_order_relaxed);
  // TODO: Fine-grained marking, so that we don't need to go through all arrays in full.
  Runtime::Current()->GetHeap()->WriteBarrierEveryFieldOf(this);
}

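// A field whose declaring class has become erroneous is reported as unresolved (null), so the
// caller falls back to full resolution.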
inline ArtField* DexCache::GetResolvedField(uint32_t field_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK_LT(field_idx, NumResolvedFields());  // NOTE: Unchecked, i.e. not throwing AIOOB.
  ArtField* field = GetElementPtrSize(GetResolvedFields(), field_idx, ptr_size);
  if (field == nullptr || field->GetDeclaringClass()->IsErroneous()) {
    return nullptr;
  }
  return field;
}

inline void DexCache::SetResolvedField(uint32_t field_idx, ArtField* field, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK_LT(field_idx, NumResolvedFields());  // NOTE: Unchecked, i.e. not throwing AIOOB.
  SetElementPtrSize(GetResolvedFields(), field_idx, field, ptr_size);
}

inline ArtMethod* DexCache::GetResolvedMethod(uint32_t method_idx, PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK_LT(method_idx, NumResolvedMethods());  // NOTE: Unchecked, i.e. not throwing AIOOB.
  ArtMethod* method = GetElementPtrSize<ArtMethod*>(GetResolvedMethods(), method_idx, ptr_size);
  // Hide resolution trampoline methods from the caller.
  if (method != nullptr && method->IsRuntimeMethod()) {
    DCHECK_EQ(method, Runtime::Current()->GetResolutionMethod());
    return nullptr;
  }
  return method;
}

inline void DexCache::SetResolvedMethod(uint32_t method_idx,
                                        ArtMethod* method,
                                        PointerSize ptr_size) {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), ptr_size);
  DCHECK_LT(method_idx, NumResolvedMethods());  // NOTE: Unchecked, i.e. not throwing AIOOB.
  SetElementPtrSize(GetResolvedMethods(), method_idx, method, ptr_size);
}

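// Resolved fields and methods are stored as native pointers whose width is the image pointer
// size, which may differ from the runtime's own pointer width; these helpers read and write
// array elements using the matching 32-bit or 64-bit representation.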
template <typename PtrType>
inline PtrType DexCache::GetElementPtrSize(PtrType* ptr_array, size_t idx, PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    uint64_t element = reinterpret_cast<const uint64_t*>(ptr_array)[idx];
    return reinterpret_cast<PtrType>(dchecked_integral_cast<uintptr_t>(element));
  } else {
    uint32_t element = reinterpret_cast<const uint32_t*>(ptr_array)[idx];
    return reinterpret_cast<PtrType>(dchecked_integral_cast<uintptr_t>(element));
  }
}

template <typename PtrType>
inline void DexCache::SetElementPtrSize(PtrType* ptr_array,
                                        size_t idx,
                                        PtrType ptr,
                                        PointerSize ptr_size) {
  if (ptr_size == PointerSize::k64) {
    reinterpret_cast<uint64_t*>(ptr_array)[idx] =
        dchecked_integral_cast<uint64_t>(reinterpret_cast<uintptr_t>(ptr));
  } else {
    reinterpret_cast<uint32_t*>(ptr_array)[idx] =
        dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr));
  }
}

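// Visits the GC root in every pair of |pairs|. If the visit updated the reference (for example
// because the object was moved), the new value is written back into the slot.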
template <typename T,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void VisitDexCachePairs(std::atomic<DexCachePair<T>>* pairs,
                               size_t num_pairs,
                               const Visitor& visitor)
    REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
  for (size_t i = 0; i < num_pairs; ++i) {
    DexCachePair<T> source = pairs[i].load(std::memory_order_relaxed);
    // NOTE: We need the "template" keyword here to avoid a compilation
    // failure. GcRoot<T> is a template argument-dependent type and we need to
    // tell the compiler to treat "Read" as a template rather than a field or
    // function. Otherwise, on encountering the "<" token, the compiler would
    // treat "Read" as a field.
    T* const before = source.object.template Read<kReadBarrierOption>();
    visitor.VisitRootIfNonNull(source.object.AddressWithoutBarrier());
    if (source.object.template Read<kReadBarrierOption>() != before) {
      pairs[i].store(source, std::memory_order_relaxed);
    }
  }
}

template <bool kVisitNativeRoots,
          VerifyObjectFlags kVerifyFlags,
          ReadBarrierOption kReadBarrierOption,
          typename Visitor>
inline void DexCache::VisitReferences(ObjPtr<Class> klass, const Visitor& visitor) {
  // Visit instance fields first.
  VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
  // Visit arrays after.
  if (kVisitNativeRoots) {
    VisitDexCachePairs<String, kReadBarrierOption, Visitor>(
        GetStrings(), NumStrings(), visitor);

    VisitDexCachePairs<Class, kReadBarrierOption, Visitor>(
        GetResolvedTypes(), NumResolvedTypes(), visitor);

    VisitDexCachePairs<MethodType, kReadBarrierOption, Visitor>(
        GetResolvedMethodTypes(), NumResolvedMethodTypes(), visitor);
  }
}

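// The Fixup* helpers below copy every cache entry into |dest|, remapping each object reference
// through |visitor| (for example when the dex cache arrays are being relocated).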
template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupStrings(StringDexCacheType* dest, const Visitor& visitor) {
  StringDexCacheType* src = GetStrings();
  for (size_t i = 0, count = NumStrings(); i < count; ++i) {
    StringDexCachePair source = src[i].load(std::memory_order_relaxed);
    String* ptr = source.object.Read<kReadBarrierOption>();
    String* new_source = visitor(ptr);
    source.object = GcRoot<String>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedTypes(TypeDexCacheType* dest, const Visitor& visitor) {
  TypeDexCacheType* src = GetResolvedTypes();
  for (size_t i = 0, count = NumResolvedTypes(); i < count; ++i) {
    TypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    Class* ptr = source.object.Read<kReadBarrierOption>();
    Class* new_source = visitor(ptr);
    source.object = GcRoot<Class>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

template <ReadBarrierOption kReadBarrierOption, typename Visitor>
inline void DexCache::FixupResolvedMethodTypes(MethodTypeDexCacheType* dest,
                                               const Visitor& visitor) {
  MethodTypeDexCacheType* src = GetResolvedMethodTypes();
  for (size_t i = 0, count = NumResolvedMethodTypes(); i < count; ++i) {
    MethodTypeDexCachePair source = src[i].load(std::memory_order_relaxed);
    MethodType* ptr = source.object.Read<kReadBarrierOption>();
    MethodType* new_source = visitor(ptr);
    source.object = GcRoot<MethodType>(new_source);
    dest[i].store(source, std::memory_order_relaxed);
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_DEX_CACHE_INL_H_