/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
  class MallocSpace;
}  // namespace space

class Heap;

namespace collector {

class SemiSpace : public GarbageCollector {
 public:
  // If true, use remembered sets in the generational mode.
  static constexpr bool kUseRememberedSet = true;

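  // A remembered set records references written from spaces that are not being
  // collected into the bump pointer space, so a bump pointer space only
  // collection can treat those slots as roots instead of scanning the whole
  // heap.
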
  explicit SemiSpace(Heap* heap, bool generational = false, const std::string& name_prefix = "");

  ~SemiSpace() {}

  virtual void InitializePhase() OVERRIDE;
  virtual void MarkingPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  virtual void ReclaimPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  virtual void FinishPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return generational_ ? kCollectorTypeGSS : kCollectorTypeSS;
  }
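
  // The phase methods above are driven in order by the GarbageCollector base
  // class, roughly:
  //   InitializePhase();  // Set up per-collection state (stacks, bitmaps, counters).
  //   MarkingPhase();     // Copy every reachable object into the to-space.
  //   ReclaimPhase();     // Sweep unmarked objects and reclaim the from-space.
  //   FinishPhase();      // Tear down per-collection state.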

  // Sets which space we will be copying objects to.
  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);

  // Sets the space where we copy objects from.
  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);

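  // Minimal usage sketch, assuming the typical driver code in Heap (the space
  // getters here are illustrative, not declared in this header):
  //   SemiSpace collector(heap, /*generational=*/false);
  //   collector.SetFromSpace(heap->GetBumpPointerSpace());  // Space being evacuated.
  //   collector.SetToSpace(heap->GetTempSpace());           // Destination space.
  //   collector.Run(kGcCauseCollectorTransition, /*clear_soft_references=*/false);
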
  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Returns the new address of the object.
  mirror::Object* MarkObject(mirror::Object* object)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

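  // Illustrative caller pattern (a sketch, not code from this file): marking a
  // reference copies the object if needed, so the caller must write back the
  // possibly-moved address.
  //   mirror::Object* ref = ReadReference(slot);   // Hypothetical helper.
  //   mirror::Object* forwarded = MarkObject(ref);
  //   if (forwarded != ref) {
  //     WriteReference(slot, forwarded);           // Hypothetical helper.
  //   }
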
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void VerifyNoFromSpaceReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t /*tid*/,
                               RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (same as
  // the object itself for non-movable objects).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object, returning true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Resizes the mark stack to new_size; used to grow the stack (typically to 2x
  // its current size) when it overflows.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj);

  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
                         mirror::Object** weak_references,
                         mirror::Object** finalizer_references,
                         mirror::Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const;

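  // Note: in the usual semi-space scheme the forwarding address of a copied
  // object is stashed in the object's header (the lock word in ART), which
  // makes this lookup O(1):
  //   mirror::Object* forwarded = GetForwardingAddressInFromSpace(obj);
  //   // Non-null once obj has been copied to the to-space.
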
  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Stack of objects that have been marked but whose references have not yet
  // been scanned.
  accounting::ObjectStack* mark_stack_;

  // Immune region: every object inside the immune region is assumed to be marked.
  ImmuneRegion immune_region_;

  // If true, the large object space is immune.
  bool is_large_object_space_immune_;

  // Destination and source spaces (any type of ContinuousMemMapAllocSpace, with
  // or without a live bitmap).
  space::ContinuousMemMapAllocSpace* to_space_;
  accounting::SpaceBitmap* to_space_live_bitmap_;  // Cached live bitmap as an optimization.
  space::ContinuousMemMapAllocSpace* from_space_;

  Thread* self_;

  // When true, the generational mode (promotion and the bump pointer
  // space only collection) is enabled. TODO: move these to a new file
  // as a new garbage collector?
  const bool generational_;

  // Used for the generational mode. The end/top of the bump
  // pointer space at the end of the last collection.
  byte* last_gc_to_space_end_;

  // Used for the generational mode. During a collection, keeps track
  // of how many bytes of objects have been copied so far from the
  // bump pointer space to the non-moving space.
  uint64_t bytes_promoted_;

  // Used for the generational mode. When true, collect the whole
  // heap. When false, collect only the bump pointer spaces.
  bool whole_heap_collection_;

  // Used for the generational mode. A counter used to enable
  // whole_heap_collection_ once per interval.
  int whole_heap_collection_interval_counter_;

  // How many bytes we avoided dirtying.
  size_t saved_bytes_;

  // Used for the generational mode. The default interval of the whole
  // heap collection. If N, the whole heap collection occurs every N
  // collections.
  static constexpr int kDefaultWholeHeapCollectionInterval = 5;
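
  // The counter and interval combine roughly like this (a sketch of the assumed
  // policy, not the exact implementation):
  //   if (++whole_heap_collection_interval_counter_ >=
  //       kDefaultWholeHeapCollectionInterval) {
  //     whole_heap_collection_ = true;
  //     whole_heap_collection_interval_counter_ = 0;
  //   }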

 private:
  DISALLOW_COPY_AND_ASSIGN(SemiSpace);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_