/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_
#define ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_

#include "barrier.h"
#include "garbage_collector.h"
#include "immune_spaces.h"
#include "jni.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "gc/accounting/atomic_stack.h"
#include "gc/accounting/read_barrier_table.h"
#include "gc/accounting/space_bitmap.h"
#include "mirror/object.h"
#include "mirror/object_reference.h"
#include "safe_map.h"

#include <map>
#include <memory>
#include <string>
#include <unordered_map>
#include <vector>

namespace art {
class RootInfo;

namespace gc {

namespace accounting {
  typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
  class HeapBitmap;
}  // namespace accounting

namespace space {
  class RegionSpace;
}  // namespace space

namespace collector {

class ConcurrentCopying : public GarbageCollector {
 public:
  // Enable the no-from-space-refs verification at the pause.
  static constexpr bool kEnableNoFromSpaceRefsVerification = kIsDebugBuild;
  // Enable the from-space bytes/objects check.
  static constexpr bool kEnableFromSpaceAccountingCheck = kIsDebugBuild;
  // Enable verbose mode.
  static constexpr bool kVerboseMode = false;

  ConcurrentCopying(Heap* heap,
                    const std::string& name_prefix = "",
                    bool measure_read_barrier_slow_path = false);
  ~ConcurrentCopying();

  virtual void RunPhases() OVERRIDE
      REQUIRES(!immune_gray_stack_lock_,
               !mark_stack_lock_,
               !rb_slow_path_histogram_lock_,
               !skipped_blocks_lock_);
  void InitializePhase() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !immune_gray_stack_lock_);
  void MarkingPhase() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void ReclaimPhase() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void FinishPhase() REQUIRES(!mark_stack_lock_,
                              !rb_slow_path_histogram_lock_,
                              !skipped_blocks_lock_);
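  // Illustrative sketch only (the authoritative sequencing lives in RunPhases() in
  // concurrent_copying.cc): a collection cycle is expected to proceed roughly as
  //   InitializePhase();   // Set up the region space bitmap, mark stacks, and immune spaces.
  //   FlipThreadRoots();   // Brief pause that flips thread roots and starts marking.
  //   MarkingPhase();      // Concurrent marking/copying driven by the mark stacks.
  //   ReclaimPhase();      // Sweep and reclaim the from-space regions.
  // with FinishPhase() run afterwards to reset state and record statistics.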

  void BindBitmaps() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeCC;
  }
  virtual void RevokeAllThreadLocalBuffers() OVERRIDE;
  void SetRegionSpace(space::RegionSpace* region_space) {
    DCHECK(region_space != nullptr);
    region_space_ = region_space;
  }
  space::RegionSpace* RegionSpace() {
    return region_space_;
  }
  void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset, mirror::Object* ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsInToSpace(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(ref != nullptr);
    return IsMarked(ref) == ref;
  }
  template<bool kGrayImmuneObject = true>
  ALWAYS_INLINE mirror::Object* Mark(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  ALWAYS_INLINE mirror::Object* MarkFromReadBarrier(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
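  // Illustrative only: MarkFromReadBarrier() is the slow-path entry that a Baker-style read
  // barrier is expected to reach while IsMarking() is true; a hypothetical caller would do
  //   mirror::Object* ref = ...;                    // Reference loaded from a field or root.
  //   if (collector->IsMarking()) {
  //     ref = collector->MarkFromReadBarrier(ref);  // Returns the to-space copy of ref.
  //   }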
  bool IsMarking() const {
    return is_marking_;
  }
  bool IsActive() const {
    return is_active_;
  }
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }
  bool IsWeakRefAccessEnabled() {
    return weak_ref_access_enabled_.LoadRelaxed();
  }
  void RevokeThreadLocalMarkStack(Thread* thread) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);

 private:
  void PushOntoMarkStack(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  mirror::Object* Copy(mirror::Object* from_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void Scan(mirror::Object* to_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void Process(mirror::Object* obj, MemberOffset offset)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  template<bool kGrayImmuneObject>
  void MarkRoot(mirror::CompressedReference<mirror::Object>* root)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void VerifyNoFromSpaceReferences() REQUIRES(Locks::mutator_lock_);
  accounting::ObjectStack* GetAllocationStack();
  accounting::ObjectStack* GetLiveStack();
  virtual void ProcessMarkStack() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  bool ProcessMarkStackOnce() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ProcessMarkStackRef(mirror::Object* to_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void GrayAllDirtyImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void VerifyGrayImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  size_t ProcessThreadLocalMarkStacks(bool disable_weak_ref_access)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void RevokeThreadLocalMarkStacks(bool disable_weak_ref_access)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SwitchToSharedMarkStackMode() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void SwitchToGcExclusiveMarkStackMode() SHARED_REQUIRES(Locks::mutator_lock_);
  virtual void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);
  void ProcessReferences(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  virtual mirror::Object* MarkObject(mirror::Object* from_ref) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* from_ref) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual mirror::Object* IsMarked(mirror::Object* from_ref) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);
  virtual bool IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* field) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);
  void SweepSystemWeaks(Thread* self)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::heap_bitmap_lock_);
  void Sweep(bool swap_bitmaps)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_, !mark_stack_lock_);
  void SweepLargeObjects(bool swap_bitmaps)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
  void FillWithDummyObject(mirror::Object* dummy_obj, size_t byte_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::Object* AllocateInSkippedBlock(size_t alloc_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void CheckEmptyMarkStack() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void IssueEmptyCheckpoint() SHARED_REQUIRES(Locks::mutator_lock_);
  bool IsOnAllocStack(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::Object* GetFwdPtr(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FlipThreadRoots() REQUIRES(!Locks::mutator_lock_);
  void SwapStacks() SHARED_REQUIRES(Locks::mutator_lock_);
  void RecordLiveStackFreezeSize(Thread* self);
  void ComputeUnevacFromSpaceLiveRatio();
  void LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset offset)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj, mirror::Object* ref)
      SHARED_REQUIRES(Locks::mutator_lock_);
  void ReenableWeakRefAccess(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
  void DisableMarking() SHARED_REQUIRES(Locks::mutator_lock_);
  void IssueDisableMarkingCheckpoint() SHARED_REQUIRES(Locks::mutator_lock_);
  void ExpandGcMarkStack() SHARED_REQUIRES(Locks::mutator_lock_);
  mirror::Object* MarkNonMoving(mirror::Object* from_ref) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  ALWAYS_INLINE mirror::Object* MarkUnevacFromSpaceRegion(mirror::Object* from_ref,
      accounting::SpaceBitmap<kObjectAlignment>* bitmap)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  template<bool kGrayImmuneObject>
  ALWAYS_INLINE mirror::Object* MarkImmuneSpace(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!immune_gray_stack_lock_);
  void PushOntoFalseGrayStack(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void ProcessFalseGrayStack() SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void ScanImmuneObject(mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  mirror::Object* MarkFromReadBarrierWithMeasurements(mirror::Object* from_ref)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void DumpPerformanceInfo(std::ostream& os) OVERRIDE REQUIRES(!rb_slow_path_histogram_lock_);

  space::RegionSpace* region_space_;  // The underlying region space.
  std::unique_ptr<Barrier> gc_barrier_;
  std::unique_ptr<accounting::ObjectStack> gc_mark_stack_;
  std::vector<mirror::Object*> false_gray_stack_ GUARDED_BY(mark_stack_lock_);
  Mutex mark_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<accounting::ObjectStack*> revoked_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  static constexpr size_t kMarkStackSize = kPageSize;
  static constexpr size_t kMarkStackPoolSize = 256;
  std::vector<accounting::ObjectStack*> pooled_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  Thread* thread_running_gc_;
  bool is_marking_;                       // True while marking is ongoing.
  bool is_active_;                        // True while the collection is ongoing.
  bool is_asserting_to_space_invariant_;  // True while asserting the to-space invariant.
  ImmuneSpaces immune_spaces_;
  accounting::SpaceBitmap<kObjectAlignment>* region_space_bitmap_;
  // A cache of Heap::GetMarkBitmap().
  accounting::HeapBitmap* heap_mark_bitmap_;
  size_t live_stack_freeze_size_;
  size_t from_space_num_objects_at_first_pause_;
  size_t from_space_num_bytes_at_first_pause_;
  Atomic<int> is_mark_stack_push_disallowed_;
  enum MarkStackMode {
    kMarkStackModeOff = 0,      // Mark stack is off.
    kMarkStackModeThreadLocal,  // All threads except for the GC-running thread push refs onto
                                // thread-local mark stacks. The GC-running thread pushes onto and
                                // pops off the GC mark stack without a lock.
    kMarkStackModeShared,       // All threads share the GC mark stack with a lock.
    kMarkStackModeGcExclusive   // The GC-running thread pushes onto and pops from the GC mark stack
                                // without a lock. Other threads won't access the mark stack.
  };
  Atomic<MarkStackMode> mark_stack_mode_;
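  // Illustrative only (inferred from the Switch*MarkStackMode methods above, not a spec): over
  // a collection cycle the mode is expected to move roughly as
  //   kMarkStackModeOff -> kMarkStackModeThreadLocal   (marking starts)
  //     -> kMarkStackModeShared                        (thread-local stacks being revoked)
  //     -> kMarkStackModeGcExclusive                   (GC thread drains the last refs)
  //     -> kMarkStackModeOff                           (marking disabled).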
  Atomic<bool> weak_ref_access_enabled_;

  // How many objects and bytes we moved. Used for accounting.
  Atomic<size_t> bytes_moved_;
  Atomic<size_t> objects_moved_;

  // The skipped blocks are memory blocks/chunks that were copies of
  // objects that went unused due to lost races (CAS failures) during
  // object copying/forwarding-pointer installation. They are reused.
  Mutex skipped_blocks_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::multimap<size_t, uint8_t*> skipped_blocks_map_ GUARDED_BY(skipped_blocks_lock_);
  Atomic<size_t> to_space_bytes_skipped_;
  Atomic<size_t> to_space_objects_skipped_;
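  // Illustrative sketch only (the real logic lives in AllocateInSkippedBlock() in the .cc file):
  // reuse is expected to be a best-fit lookup on the size-keyed multimap, roughly
  //   auto it = skipped_blocks_map_.lower_bound(alloc_size);  // Smallest block >= alloc_size.
  //   // If found: remove the entry, carve alloc_size bytes out of it, and reinsert any
  //   // sufficiently large remainder as a new skipped block.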

  // If measure_read_barrier_slow_path_ is true, we count how much time is spent in
  // MarkFromReadBarrier and also log it.
  bool measure_read_barrier_slow_path_;
  // mark_from_read_barrier_measurements_ is true if systrace is enabled or
  // measure_read_barrier_slow_path_ is true.
  bool mark_from_read_barrier_measurements_;
  Atomic<uint64_t> rb_slow_path_ns_;
  Atomic<uint64_t> rb_slow_path_count_;
  Atomic<uint64_t> rb_slow_path_count_gc_;
  mutable Mutex rb_slow_path_histogram_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Histogram<uint64_t> rb_slow_path_time_histogram_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_total_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_gc_total_ GUARDED_BY(rb_slow_path_histogram_lock_);

  accounting::ReadBarrierTable* rb_table_;
  bool force_evacuate_all_;  // True if all regions are evacuated.
  Atomic<bool> updated_all_immune_objects_;
  bool gc_grays_immune_objects_;
  Mutex immune_gray_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<mirror::Object*> immune_gray_stack_ GUARDED_BY(immune_gray_stack_lock_);

  class AssertToSpaceInvariantFieldVisitor;
  class AssertToSpaceInvariantObjectVisitor;
  class AssertToSpaceInvariantRefsVisitor;
  class ClearBlackPtrsVisitor;
  class ComputeUnevacFromSpaceLiveRatioVisitor;
  class DisableMarkingCheckpoint;
  class FlipCallback;
  class GrayImmuneObjectVisitor;
  class ImmuneSpaceScanObjVisitor;
  class LostCopyVisitor;
  class RefFieldsVisitor;
  class RevokeThreadLocalMarkStackCheckpoint;
  class ScopedGcGraysImmuneObjects;
  class ThreadFlipVisitor;
  class VerifyGrayImmuneObjectsVisitor;
  class VerifyNoFromSpaceRefsFieldVisitor;
  class VerifyNoFromSpaceRefsObjectVisitor;
  class VerifyNoFromSpaceRefsVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ConcurrentCopying);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_