/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_region.h"
#include "lock_word.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object*> ObjectStack;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

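  // Entry point for the collection: runs the initialize, marking, reclaim and finish phases.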
  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
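  // Marks the root set and all objects reachable from it.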
  void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
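  // Reclaims unmarked objects and compacts the space.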
  void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
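  // Cleans up internal state after the collection.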
  void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }

  // Sets which space we will be compacting objects in.
  void SetSpace(space::BumpPointerSpace* space);

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

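  // Scans all of the object's reference fields, marking and pushing any unmarked referents.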
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

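  // Clears system weak references (e.g. interned strings, JNI weak globals) whose targets are
  // no longer marked.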
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

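  // Static callbacks used by the runtime's root visiting and reference processing machinery;
  // each expects the MarkCompact instance as the arg parameter.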
  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t /*tid*/,
                               RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static bool HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                          void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static void DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                             void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (same as
  // the object itself for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object, returning true if the object was not previously marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Expands the mark stack to new_size, preserving its contents; called with twice the current
  // capacity when the stack overflows.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj);

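  // Updates the mod-union tables of the immune spaces and marks the references they contain
  // into the collected space.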
  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Three-pass mark-compact approach: calculate forwarding addresses, update references, then
  // move the objects and restore their lock words.
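  // Roughly:
  //   CalculateObjectForwardingAddresses();  // Pass 1: assign each live object its new address.
  //   UpdateReferences();                    // Pass 2: rewrite all references to use them.
  //   MoveObjects();                         // Pass 3: copy objects and restore lock words.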
  void Compact() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding
  // bitmap.
  void CalculateObjectForwardingAddresses()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  static void UpdateRootCallback(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
                                 RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Mark a single object.
  void MarkObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                Locks::mutator_lock_);
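  // Returns true if the object is marked. Objects in the immune region are always considered
  // marked.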
  bool IsMarked(const mirror::Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
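  // Computes the forwarding address for obj at the current bump pointer, saves obj's lock word
  // if it is non-trivial, then stores the forwarding address into the lock word.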
  void ForwardObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                   Locks::mutator_lock_);
  // Update a single heap reference.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                          void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

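  // Stack of marked objects whose reference fields have not yet been scanned.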
  accounting::ObjectStack* mark_stack_;

  // Immune region; every object inside it is assumed to be marked.
  ImmuneRegion immune_region_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer in the space where the next forwarding address will be.
  byte* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move; the granularity needs to be divided
  // by 2 so that we can handle objects which are only 8 bytes long.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
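  // During compaction each live object's forwarding address is stashed in its lock word, so any
  // lock word which already carries state (e.g. a hash code or a thin lock) is saved below and
  // written back once the objects have been moved.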
  // Bitmap which describes which lock words we need to restore.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // Which lock words we need to restore as we are moving objects.
  std::deque<LockWord> lock_words_to_restore_;

 private:
  friend class BitmapSetSlowPathVisitor;
  friend class CalculateObjectForwardingAddressVisitor;
  friend class MarkCompactMarkObjectVisitor;
  friend class MoveObjectVisitor;
  friend class UpdateObjectReferencesVisitor;
  friend class UpdateReferenceVisitor;
  DISALLOW_COPY_AND_ASSIGN(MarkCompact);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_