/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
  class MallocSpace;
}  // namespace space

class Heap;

namespace collector {

class SemiSpace : public GarbageCollector {
 public:
  explicit SemiSpace(Heap* heap, bool generational = false,
                     const std::string& name_prefix = "");

  ~SemiSpace() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const {
    return false;
  }
  virtual void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypePartial;
  }

  // Sets the space to which objects will be copied.
  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);

  // Sets the space from which objects are copied.
  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);
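  // Illustrative usage sketch (an assumption for orientation, not a contract;
  // only the constructor and the two setters above come from this header):
  //   SemiSpace collector(heap, /* generational */ false);
  //   collector.SetFromSpace(source_space);
  //   collector.SetToSpace(destination_space);
  // After a collection, live objects reside in the to-space and the
  // from-space can be reclaimed wholesale.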

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Returns the new address of the object.
  mirror::Object* MarkObject(mirror::Object* object)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
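  // Assumed semantics (standard for a copying collector): the first time a
  // from-space object is marked it is copied to the to-space and a forwarding
  // address is installed; later calls return that address instead of copying.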

  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(const mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitObjectReferencesAndClass(mirror::Object* obj, const Visitor& visitor)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t /*tid*/,
                               RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (same as
  // the object itself for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const;

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object, returning true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Expands the mark stack to the given size, typically double the current size.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const mirror::Class* klass, const mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(const mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(const mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(const mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Push an object onto the mark stack.
  inline void MarkStackPush(mirror::Object* obj);
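  // Minimal sketch (assumed) of the push path, doubling the stack when full;
  // Size/Capacity/PushBack are AtomicStack operations used for illustration:
  //   if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
  //     ResizeMarkStack(mark_stack_->Size() * 2);
  //   }
  //   mark_stack_->PushBack(obj);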

  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
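  // Minimal drain-loop sketch (assumed): popping an object and scanning it
  // blackens it; scanning may push newly discovered gray objects back on.
  //   while (!mark_stack_->IsEmpty()) {
  //     ScanObject(mark_stack_->PopBack());
  //   }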

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
                         mirror::Object** weak_references,
                         mirror::Object** finalizer_references,
                         mirror::Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const;
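  // Illustrative sketch (assumed; the LockWord-based encoding is an
  // implementation detail, not promised by this header): a copied object's
  // forwarding address is read back out of its lock word.
  //   LockWord lock_word = obj->GetLockWord();
  //   if (lock_word.GetState() != LockWord::kForwardingAddress) {
  //     return nullptr;
  //   }
  //   return reinterpret_cast<mirror::Object*>(lock_word.ForwardingAddress());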

  // Stack of objects that have been marked but not yet scanned.
  accounting::ObjectStack* mark_stack_;

  // Immune region; every object inside the immune region is assumed to be marked.
  ImmuneRegion immune_region_;
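  // Sketch (assumed) of how the immune region short-circuits marking:
  //   if (immune_region_.ContainsObject(obj)) {
  //     return obj;  // Treated as already marked; nothing to copy.
  //   }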

  // If true, the large object space is immune.
  bool is_large_object_space_immune_;

  // Destination and source spaces (any type of ContinuousMemMapAllocSpace, with or without a
  // live bitmap).
  space::ContinuousMemMapAllocSpace* to_space_;
  accounting::SpaceBitmap* to_space_live_bitmap_;  // Cached live bitmap as an optimization.
  space::ContinuousMemMapAllocSpace* from_space_;

  Thread* self_;

  // When true, generational mode (promotion and bump-pointer-space-only collections) is
  // enabled. TODO: move these to a new file as a new garbage collector?
  const bool generational_;

  // Used for the generational mode: the end/top of the bump pointer space at the end of the
  // last collection.
  byte* last_gc_to_space_end_;

  // Used for the generational mode. During a collection, keeps track of how many bytes of
  // objects have been copied so far from the bump pointer space to the non-moving space.
  uint64_t bytes_promoted_;

  // Used for the generational mode. When true, collect the whole heap; when false, collect
  // only the bump pointer spaces.
  bool whole_heap_collection_;

  // Used for the generational mode. A counter used to enable whole_heap_collection_ once per
  // interval.
  int whole_heap_collection_interval_counter_;

  // How many bytes we avoided dirtying.
  size_t saved_bytes_;

  // Used for the generational mode. The default interval between whole heap collections:
  // if N, a whole heap collection occurs every N collections.
  static constexpr int kDefaultWholeHeapCollectionInterval = 5;
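  // Sketch (assumed) of how the counter and interval could interact at the
  // start of a collection in generational mode:
  //   if (generational_ && !whole_heap_collection_ &&
  //       ++whole_heap_collection_interval_counter_ >=
  //           kDefaultWholeHeapCollectionInterval) {
  //     whole_heap_collection_ = true;
  //     whole_heap_collection_interval_counter_ = 0;
  //   }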

 private:
  DISALLOW_COPY_AND_ASSIGN(SemiSpace);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_