/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_
#define ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_

#include <unordered_map>
#include <vector>

#include "barrier.h"
#include "garbage_collector.h"
#include "gc/accounting/atomic_stack.h"
#include "gc/accounting/read_barrier_table.h"
#include "gc/accounting/space_bitmap.h"
#include "immune_spaces.h"
#include "jni.h"
#include "mirror/object.h"
#include "mirror/object_reference.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "safe_map.h"

namespace art {
class RootInfo;

namespace gc {

namespace accounting {
typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
class HeapBitmap;
}  // namespace accounting

namespace space {
class RegionSpace;
}  // namespace space

namespace collector {

class ConcurrentCopying : public GarbageCollector {
 public:
  // Enable the no-from-space-refs verification at the pause.
  static constexpr bool kEnableNoFromSpaceRefsVerification = kIsDebugBuild;
  // Enable the from-space bytes/objects check.
  static constexpr bool kEnableFromSpaceAccountingCheck = kIsDebugBuild;
  // Enable verbose mode.
  static constexpr bool kVerboseMode = false;

  ConcurrentCopying(Heap* heap,
                    const std::string& name_prefix = "",
                    bool measure_read_barrier_slow_path = false);
  ~ConcurrentCopying();

  virtual void RunPhases() OVERRIDE
      REQUIRES(!immune_gray_stack_lock_,
               !mark_stack_lock_,
               !rb_slow_path_histogram_lock_,
               !skipped_blocks_lock_);
  void InitializePhase() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !immune_gray_stack_lock_);
  void MarkingPhase() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void ReclaimPhase() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void FinishPhase() REQUIRES(!mark_stack_lock_,
                              !rb_slow_path_histogram_lock_,
                              !skipped_blocks_lock_);

  void BindBitmaps() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeCC;
  }
  virtual void RevokeAllThreadLocalBuffers() OVERRIDE;
  void SetRegionSpace(space::RegionSpace* region_space) {
    DCHECK(region_space != nullptr);
    region_space_ = region_space;
  }
  space::RegionSpace* RegionSpace() {
    return region_space_;
  }
  void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset, mirror::Object* ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
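  // Note: a reference is in to-space exactly when looking it up via IsMarked()
  // yields the reference itself, since IsMarked() returns the to-space address
  // of a marked object.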
  bool IsInToSpace(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(ref != nullptr);
    return IsMarked(ref) == ref;
  }
  template<bool kGrayImmuneObject = true>
  ALWAYS_INLINE mirror::Object* Mark(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  ALWAYS_INLINE mirror::Object* MarkFromReadBarrier(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
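  // Usage sketch (an illustration of the expected caller-side shape, not a
  // verbatim ART call site): a mutator's read barrier hands a
  // possibly-from-space reference to the collector while marking is on and
  // adopts the returned to-space pointer:
  //
  //   if (collector->IsMarking()) {
  //     ref = collector->MarkFromReadBarrier(ref);  // may return a new address
  //   }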
  bool IsMarking() const {
    return is_marking_;
  }
  bool IsActive() const {
    return is_active_;
  }
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }
  bool IsWeakRefAccessEnabled() {
    return weak_ref_access_enabled_.LoadRelaxed();
  }
  void RevokeThreadLocalMarkStack(Thread* thread) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);

 private:
  void PushOntoMarkStack(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  mirror::Object* Copy(mirror::Object* from_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void Scan(mirror::Object* to_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void Process(mirror::Object* obj, MemberOffset offset)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  template<bool kGrayImmuneObject>
  void MarkRoot(mirror::CompressedReference<mirror::Object>* root)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void VerifyNoFromSpaceReferences() REQUIRES(Locks::mutator_lock_);
  accounting::ObjectStack* GetAllocationStack();
  accounting::ObjectStack* GetLiveStack();
  virtual void ProcessMarkStack() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  bool ProcessMarkStackOnce() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ProcessMarkStackRef(mirror::Object* to_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void GrayAllDirtyImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void VerifyGrayImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  size_t ProcessThreadLocalMarkStacks(bool disable_weak_ref_access)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void RevokeThreadLocalMarkStacks(bool disable_weak_ref_access)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SwitchToSharedMarkStackMode() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void SwitchToGcExclusiveMarkStackMode() SHARED_REQUIRES(Locks::mutator_lock_);
  virtual void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);
  void ProcessReferences(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  virtual mirror::Object* MarkObject(mirror::Object* from_ref) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* from_ref) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual mirror::Object* IsMarked(mirror::Object* from_ref) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);
  virtual bool IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* field) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SweepSystemWeaks(Thread* self)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::heap_bitmap_lock_);
  void Sweep(bool swap_bitmaps)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_, !mark_stack_lock_);
  void SweepLargeObjects(bool swap_bitmaps)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
  void FillWithDummyObject(mirror::Object* dummy_obj, size_t byte_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::Object* AllocateInSkippedBlock(size_t alloc_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CheckEmptyMarkStack() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void IssueEmptyCheckpoint() SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsOnAllocStack(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::Object* GetFwdPtr(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FlipThreadRoots() REQUIRES(!Locks::mutator_lock_);
  void SwapStacks() SHARED_REQUIRES(Locks::mutator_lock_);
  void RecordLiveStackFreezeSize(Thread* self);
  void ComputeUnevacFromSpaceLiveRatio();
  void LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj, mirror::Object* ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void ReenableWeakRefAccess(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  void DisableMarking() SHARED_REQUIRES(Locks::mutator_lock_);
  void IssueDisableMarkingCheckpoint() SHARED_REQUIRES(Locks::mutator_lock_);
  void ExpandGcMarkStack() SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::Object* MarkNonMoving(mirror::Object* from_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  ALWAYS_INLINE mirror::Object* MarkUnevacFromSpaceRegion(mirror::Object* from_ref,
      accounting::SpaceBitmap<kObjectAlignment>* bitmap)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  template<bool kGrayImmuneObject>
  ALWAYS_INLINE mirror::Object* MarkImmuneSpace(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!immune_gray_stack_lock_);
  void PushOntoFalseGrayStack(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void ProcessFalseGrayStack() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void ScanImmuneObject(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  mirror::Object* MarkFromReadBarrierWithMeasurements(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void DumpPerformanceInfo(std::ostream& os) OVERRIDE REQUIRES(!rb_slow_path_histogram_lock_);

  space::RegionSpace* region_space_;  // The underlying region space.
  std::unique_ptr<Barrier> gc_barrier_;
  std::unique_ptr<accounting::ObjectStack> gc_mark_stack_;
  std::vector<mirror::Object*> false_gray_stack_ GUARDED_BY(mark_stack_lock_);
  Mutex mark_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<accounting::ObjectStack*> revoked_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  static constexpr size_t kMarkStackSize = kPageSize;
  static constexpr size_t kMarkStackPoolSize = 256;
  std::vector<accounting::ObjectStack*> pooled_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  Thread* thread_running_gc_;
  bool is_marking_;                       // True while marking is ongoing.
  bool is_active_;                        // True while the collection is ongoing.
  bool is_asserting_to_space_invariant_;  // True while asserting the to-space invariant.
  ImmuneSpaces immune_spaces_;
  accounting::SpaceBitmap<kObjectAlignment>* region_space_bitmap_;
  // A cache of Heap::GetMarkBitmap().
  accounting::HeapBitmap* heap_mark_bitmap_;
  size_t live_stack_freeze_size_;
  size_t from_space_num_objects_at_first_pause_;
  size_t from_space_num_bytes_at_first_pause_;
  Atomic<int> is_mark_stack_push_disallowed_;
  enum MarkStackMode {
    kMarkStackModeOff = 0,      // Mark stack is off.
    kMarkStackModeThreadLocal,  // All threads except for the GC-running thread push refs onto
                                // thread-local mark stacks. The GC-running thread pushes onto and
                                // pops off the GC mark stack without a lock.
    kMarkStackModeShared,       // All threads share the GC mark stack with a lock.
    kMarkStackModeGcExclusive   // The GC-running thread pushes onto and pops from the GC mark
                                // stack without a lock. Other threads won't access the mark stack.
  };
  Atomic<MarkStackMode> mark_stack_mode_;
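  // Lifecycle sketch for mark_stack_mode_ (inferred from the Switch* methods
  // above; an assumption, not something this header asserts): a collection
  // cycle typically runs
  //   kMarkStackModeOff -> kMarkStackModeThreadLocal  (marking starts)
  //                     -> kMarkStackModeShared       (SwitchToSharedMarkStackMode)
  //                     -> kMarkStackModeGcExclusive  (SwitchToGcExclusiveMarkStackMode)
  //                     -> kMarkStackModeOff          (mark stack verified empty)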
  Atomic<bool> weak_ref_access_enabled_;

  // How many objects and bytes we moved. Used for accounting.
  Atomic<size_t> bytes_moved_;
  Atomic<size_t> objects_moved_;

  // The skipped blocks are memory blocks/chunks that were copies of
  // objects that went unused due to lost races (CAS failures) at
  // object copy/forward-pointer install time. They are reused.
  Mutex skipped_blocks_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::multimap<size_t, uint8_t*> skipped_blocks_map_ GUARDED_BY(skipped_blocks_lock_);
  Atomic<size_t> to_space_bytes_skipped_;
  Atomic<size_t> to_space_objects_skipped_;
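  // Reuse sketch (hedged; the actual logic lives in Copy() and
  // AllocateInSkippedBlock(), declared above but defined in the .cc file,
  // and byte_size/block_address are hypothetical names): a copier that loses
  // the forwarding-pointer CAS stashes its block under skipped_blocks_lock_,
  //
  //   MutexLock mu(self, skipped_blocks_lock_);
  //   skipped_blocks_map_.insert(std::make_pair(byte_size, block_address));
  //
  // so that a later copy of a similarly sized object can take the block back
  // out instead of allocating fresh to-space memory.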

  // If measure_read_barrier_slow_path_ is true, we count how much time is
  // spent in MarkFromReadBarrier and log the measurements.
  bool measure_read_barrier_slow_path_;
  // mark_from_read_barrier_measurements_ is true if systrace is enabled or
  // measure_read_barrier_slow_path_ is true.
  bool mark_from_read_barrier_measurements_;
  Atomic<uint64_t> rb_slow_path_ns_;
  Atomic<uint64_t> rb_slow_path_count_;
  Atomic<uint64_t> rb_slow_path_count_gc_;
  mutable Mutex rb_slow_path_histogram_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Histogram<uint64_t> rb_slow_path_time_histogram_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_total_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_gc_total_ GUARDED_BY(rb_slow_path_histogram_lock_);
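  // Gating sketch (an assumption about the definition in the .cc file, not
  // something this header declares): MarkFromReadBarrier only takes the timed
  // slow path when measurements are enabled, roughly
  //
  //   return mark_from_read_barrier_measurements_
  //              ? MarkFromReadBarrierWithMeasurements(from_ref)
  //              : Mark(from_ref);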

  accounting::ReadBarrierTable* rb_table_;
  bool force_evacuate_all_;  // True if all regions are evacuated.
  Atomic<bool> updated_all_immune_objects_;
  bool gc_grays_immune_objects_;
  Mutex immune_gray_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<mirror::Object*> immune_gray_stack_ GUARDED_BY(immune_gray_stack_lock_);

  class AssertToSpaceInvariantFieldVisitor;
  class AssertToSpaceInvariantObjectVisitor;
  class AssertToSpaceInvariantRefsVisitor;
  class ClearBlackPtrsVisitor;
  class ComputeUnevacFromSpaceLiveRatioVisitor;
  class DisableMarkingCheckpoint;
  class FlipCallback;
  class GrayImmuneObjectVisitor;
  class ImmuneSpaceScanObjVisitor;
  class LostCopyVisitor;
  class RefFieldsVisitor;
  class RevokeThreadLocalMarkStackCheckpoint;
  class ScopedGcGraysImmuneObjects;
  class ThreadFlipVisitor;
  class VerifyGrayImmuneObjectsVisitor;
  class VerifyNoFromSpaceRefsFieldVisitor;
  class VerifyNoFromSpaceRefsObjectVisitor;
  class VerifyNoFromSpaceRefsVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ConcurrentCopying);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_