/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_
#define ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
  class MallocSpace;
}  // namespace space

class Heap;

namespace collector {

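// A semi-space ("copying") collector: live objects are traced from the roots and copied from a
// from-space into a to-space, leaving forwarding addresses behind; afterwards the from-space
// contains only garbage and can be reclaimed in one step. In the generational mode, objects that
// survived the previous collection may be promoted to the non-moving space, and most collections
// collect only the bump pointer space.
//
// Rough usage sketch (the exact call sequence lives in Heap; the space names below are
// illustrative):
//
//   SemiSpace collector(heap);
//   collector.SetFromSpace(bump_pointer_space);  // Space being evacuated.
//   collector.SetToSpace(temp_space);            // Destination for survivors.
//   collector.Run(kGcCauseCollectorTransition, /* clear_soft_references */ false);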
class SemiSpace : public GarbageCollector {
 public:
  // If true, use remembered sets in the generational mode.
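  // A remembered set records references from spaces that are not being collected into the bump
  // pointer space, so that a bump-pointer-space-only collection can scan just those recorded
  // references instead of the older spaces in full.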
  static constexpr bool kUseRememberedSet = true;

  explicit SemiSpace(Heap* heap, bool generational = false, const std::string& name_prefix = "");

  ~SemiSpace() {}

  virtual void InitializePhase() OVERRIDE;
  virtual void MarkingPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  virtual void ReclaimPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  virtual void FinishPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return generational_ ? kCollectorTypeGSS : kCollectorTypeSS;
  }

  // Sets the space that objects will be copied to.
  void SetToSpace(space::ContinuousMemMapAllocSpace* to_space);

  // Sets the space that objects are copied from.
  void SetFromSpace(space::ContinuousMemMapAllocSpace* from_space);
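  // Note: both spaces must be set by the caller (in practice, Heap) before each collection.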

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Returns the new address of the object.
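  // If the referent is in the from-space, it is copied to the to-space (or promoted, in the
  // generational mode), a forwarding address is installed in the old copy's lock word, and the
  // reference is updated in place.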
  template<bool kPoisonReferences>
  void MarkObject(mirror::ObjectReference<kPoisonReferences, mirror::Object>* obj_ptr)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void VerifyNoFromSpaceReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e. the
  // image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t /*tid*/,
                               RootType /*root_type*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (the same
  // as the object itself for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object; returns true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Resize the mark stack to new_size, preserving its contents; used when a push finds the stack
  // full.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  virtual bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj);

  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
                         mirror::Object** weak_references,
                         mirror::Object** finalizer_references,
                         mirror::Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

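  // Returns the forwarding address recorded in the lock word of a from-space object, or null if
  // the object has not been copied yet.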
  inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const;

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Mark stack of gray objects: objects that have been marked but whose fields have not yet been
  // scanned.
  accounting::ObjectStack* mark_stack_;

  // Immune region, every object inside the immune region is assumed to be marked.
  ImmuneRegion immune_region_;

  // If true, the large object space is immune.
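  // In the generational mode this is the case during bump-pointer-space-only collections, which
  // do not collect the large object space.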
  bool is_large_object_space_immune_;

  // Destination and source spaces (may be any type of ContinuousMemMapAllocSpace, with or without
  // a live bitmap).
  space::ContinuousMemMapAllocSpace* to_space_;
  accounting::SpaceBitmap* to_space_live_bitmap_;  // Cached live bitmap as an optimization.
  space::ContinuousMemMapAllocSpace* from_space_;

  Thread* self_;

  // When true, the generational mode (promotion and bump-pointer-space-only collection) is
  // enabled. TODO: move these to a new file as a new garbage collector?
  const bool generational_;

  // Used for the generational mode. The end/top of the bump pointer space at the end of the last
  // collection.
  byte* last_gc_to_space_end_;

  // Used for the generational mode. During a collection, keeps track of how many bytes of
  // objects have been copied so far from the bump pointer space to the non-moving space.
  uint64_t bytes_promoted_;

  // Used for the generational mode. When true, collect the whole heap. When false, collect only
  // the bump pointer spaces.
  bool whole_heap_collection_;

  // Used for the generational mode. A counter used to enable whole_heap_collection_ once per
  // interval.
  int whole_heap_collection_interval_counter_;

  // How many bytes we avoided dirtying.
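  // A byte counts as avoided when the copy routine finds the destination already holds the same
  // contents and skips the write, keeping the page clean.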
  size_t saved_bytes_;

  // Used for the generational mode. The default interval of the whole heap collection. If N, the
  // whole heap collection occurs every N collections.
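  // For example, with the default of 5, four bump-pointer-space-only collections are followed by
  // one whole-heap collection.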
  static constexpr int kDefaultWholeHeapCollectionInterval = 5;

 private:
  DISALLOW_COPY_AND_ASSIGN(SemiSpace);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_SEMI_SPACE_H_