/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_SPACE_H_
#define ART_RUNTIME_GC_SPACE_SPACE_H_

#include <string>

#include "UniquePtr.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc/accounting/space_bitmap.h"
#include "globals.h"
#include "image.h"
#include "mem_map.h"

namespace art {
namespace mirror {
  class Object;
}  // namespace mirror

namespace gc {

namespace accounting {
  class SpaceBitmap;
}  // namespace accounting

class Heap;

namespace space {

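// Forward declarations for the space class hierarchy.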
class AllocSpace;
class BumpPointerSpace;
class ContinuousMemMapAllocSpace;
class ContinuousSpace;
class DiscontinuousSpace;
class MallocSpace;
class DlMallocSpace;
class RosAllocSpace;
class ImageSpace;
class LargeObjectSpace;
class ZygoteSpace;

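// Gates extra sanity checks in the space implementations; follows kIsDebugBuild, so the checks
// only run in debug builds.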
static constexpr bool kDebugSpaces = kIsDebugBuild;

// See Space::GetGcRetentionPolicy.
enum GcRetentionPolicy {
  // Objects are retained forever with this policy for a space.
  kGcRetentionPolicyNeverCollect,
  // Every GC cycle will attempt to collect objects in this space.
  kGcRetentionPolicyAlwaysCollect,
  // Objects will be considered for collection only in "full" GC cycles, i.e. faster partial
  // collections won't scan areas with this policy, such as the Zygote space.
  kGcRetentionPolicyFullCollect,
};
std::ostream& operator<<(std::ostream& os, const GcRetentionPolicy& policy);

enum SpaceType {
  kSpaceTypeImageSpace,
  kSpaceTypeMallocSpace,
  kSpaceTypeZygoteSpace,
  kSpaceTypeBumpPointerSpace,
  kSpaceTypeLargeObjectSpace,
};
std::ostream& operator<<(std::ostream& os, const SpaceType& space_type);

// A space contains memory allocated for managed objects.
class Space {
 public:
  // Dump space. Also key method for C++ vtables.
  virtual void Dump(std::ostream& os) const;

  // Name of the space. May vary, for example before/after the Zygote fork.
  const char* GetName() const {
    return name_.c_str();
  }

  // The policy governing when objects associated with this space are collected.
  GcRetentionPolicy GetGcRetentionPolicy() const {
    return gc_retention_policy_;
  }

  // Is the given object contained within this space?
  virtual bool Contains(const mirror::Object* obj) const = 0;

  // The kind of space this is: image, malloc, zygote, bump pointer, or large object.
  virtual SpaceType GetType() const = 0;

  // Is this an image space, i.e. one backed by a memory-mapped image file?
  bool IsImageSpace() const {
    return GetType() == kSpaceTypeImageSpace;
  }
  ImageSpace* AsImageSpace();

  // Is this a malloc-backed allocation space (a DlMallocSpace or a RosAllocSpace)?
  bool IsMallocSpace() const {
    SpaceType type = GetType();
    return type == kSpaceTypeMallocSpace;
  }
  MallocSpace* AsMallocSpace();

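  // Concrete malloc space type queries. The base implementations return false and the casts
  // abort; DlMallocSpace and RosAllocSpace override the variant that matches them.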
  virtual bool IsDlMallocSpace() const {
    return false;
  }
  virtual DlMallocSpace* AsDlMallocSpace() {
    LOG(FATAL) << "Unreachable";
    return nullptr;
  }
  virtual bool IsRosAllocSpace() const {
    return false;
  }
  virtual RosAllocSpace* AsRosAllocSpace() {
    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  // Is this the space allocated into by the Zygote and no longer in use?
  bool IsZygoteSpace() const {
    return GetType() == kSpaceTypeZygoteSpace;
  }
  virtual ZygoteSpace* AsZygoteSpace() {
    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  // Is this space a bump pointer space?
  bool IsBumpPointerSpace() const {
    return GetType() == kSpaceTypeBumpPointerSpace;
  }
  virtual BumpPointerSpace* AsBumpPointerSpace() {
    LOG(FATAL) << "Unreachable";
    return nullptr;
  }

  // Does this space hold large objects and implement the large object space abstraction?
  bool IsLargeObjectSpace() const {
    return GetType() == kSpaceTypeLargeObjectSpace;
  }
  LargeObjectSpace* AsLargeObjectSpace();

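  // Continuous, discontinuous and alloc space queries. These default to false here and are
  // overridden by the corresponding subclasses declared below.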
  virtual bool IsContinuousSpace() const {
    return false;
  }
  ContinuousSpace* AsContinuousSpace();

  virtual bool IsDiscontinuousSpace() const {
    return false;
  }
  DiscontinuousSpace* AsDiscontinuousSpace();

  virtual bool IsAllocSpace() const {
    return false;
  }
  virtual AllocSpace* AsAllocSpace() {
    LOG(FATAL) << "Unimplemented";
    return nullptr;
  }

  virtual bool IsContinuousMemMapAllocSpace() const {
    return false;
  }
  virtual ContinuousMemMapAllocSpace* AsContinuousMemMapAllocSpace() {
    LOG(FATAL) << "Unimplemented";
    return nullptr;
  }

  virtual ~Space() {}

 protected:
  Space(const std::string& name, GcRetentionPolicy gc_retention_policy);

  void SetGcRetentionPolicy(GcRetentionPolicy gc_retention_policy) {
    gc_retention_policy_ = gc_retention_policy;
  }

  // Name of the space that may vary due to the Zygote fork.
  std::string name_;

 protected:
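  // Bookkeeping handed to sweep callbacks: whether the bitmaps have been swapped, the owning
  // heap and space, the sweeping thread, and running totals of freed objects and bytes.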
  struct SweepCallbackContext {
    bool swap_bitmaps;
    Heap* heap;
    space::Space* space;
    Thread* self;
    size_t freed_objects;
    size_t freed_bytes;
  };

  // When should objects within this space be reclaimed? Not constant as we vary it in the case
  // of Zygote forking.
  GcRetentionPolicy gc_retention_policy_;

 private:
  friend class art::gc::Heap;
  DISALLOW_COPY_AND_ASSIGN(Space);
};
std::ostream& operator<<(std::ostream& os, const Space& space);

// AllocSpace interface.
class AllocSpace {
 public:
  // Number of bytes currently allocated.
  virtual uint64_t GetBytesAllocated() = 0;
  // Number of objects currently allocated.
  virtual uint64_t GetObjectsAllocated() = 0;

  // Allocate num_bytes without allowing growth. If the allocation succeeds, the output
  // parameter bytes_allocated is set to the number of bytes actually allocated, which is
  // >= num_bytes.
  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated) = 0;

  // Return the storage space required by obj.
  virtual size_t AllocationSize(mirror::Object* obj) = 0;

  // Free a single object; returns how many bytes were freed.
  virtual size_t Free(Thread* self, mirror::Object* ptr) = 0;

  // Free a list of num_ptrs objects; returns how many bytes were freed.
  virtual size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) = 0;

  // Revoke any sort of thread-local buffers that are used to speed up allocations for the
  // given thread, if the alloc space implementation uses any. No-op by default.
  virtual void RevokeThreadLocalBuffers(Thread* /*thread*/) {}

  // Revoke any sort of thread-local buffers that are used to speed up allocations for all the
  // threads, if the alloc space implementation uses any. No-op by default.
  virtual void RevokeAllThreadLocalBuffers() {}

 protected:
  AllocSpace() {}
  virtual ~AllocSpace() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(AllocSpace);
};

// Continuous spaces have bitmaps and an address range. Although not required, objects within
// continuous spaces can be marked in the card table.
class ContinuousSpace : public Space {
 public:
  // Address at which the space begins.
  byte* Begin() const {
    return begin_;
  }

  // Current address at which the space ends, which may vary as the space is filled.
  byte* End() const {
    return end_;
  }

  // The end of the address range covered by the space.
  byte* Limit() const {
    return limit_;
  }

  // Change the end of the space. Be careful with use since changing the end of a space to an
  // invalid value may break the GC.
  void SetEnd(byte* end) {
    end_ = end;
  }

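  // Change the limit of the space, i.e. the end of the address range it covers.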
  void SetLimit(byte* limit) {
    limit_ = limit;
  }

  // Current size of the space.
  size_t Size() const {
    return End() - Begin();
  }

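  // Bitmaps tracking which objects in the space are live and which have been marked by the GC.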
  virtual accounting::SpaceBitmap* GetLiveBitmap() const = 0;
  virtual accounting::SpaceBitmap* GetMarkBitmap() const = 0;

  // Maximum size to which the mapped space can grow.
  virtual size_t Capacity() const {
    return Limit() - Begin();
  }

  // Is the object within the address range covered by this space? Continuous spaces are
  // iterated over from low to high address.
  bool HasAddress(const mirror::Object* obj) const {
    const byte* byte_ptr = reinterpret_cast<const byte*>(obj);
    return byte_ptr >= Begin() && byte_ptr < Limit();
  }

  bool Contains(const mirror::Object* obj) const {
    return HasAddress(obj);
  }

  virtual bool IsContinuousSpace() const {
    return true;
  }

  virtual ~ContinuousSpace() {}

 protected:
  ContinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy,
                  byte* begin, byte* end, byte* limit) :
      Space(name, gc_retention_policy), begin_(begin), end_(end), limit_(limit) {
  }

  // The beginning of the storage for fast access.
  byte* begin_;

  // Current end of the space.
  byte* volatile end_;

  // Limit of the space.
  byte* limit_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ContinuousSpace);
};

// A space where objects may be allocated higgledy-piggledy throughout virtual memory. Currently
// the card table can't cover these objects, so the write barrier shouldn't be triggered. This
// makes them suitable for large primitive arrays.
class DiscontinuousSpace : public Space {
 public:
  accounting::ObjectSet* GetLiveObjects() const {
    return live_objects_.get();
  }

  accounting::ObjectSet* GetMarkObjects() const {
    return mark_objects_.get();
  }

  virtual bool IsDiscontinuousSpace() const {
    return true;
  }

  virtual ~DiscontinuousSpace() {}

 protected:
  DiscontinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy);

  UniquePtr<accounting::ObjectSet> live_objects_;
  UniquePtr<accounting::ObjectSet> mark_objects_;

 private:
  DISALLOW_COPY_AND_ASSIGN(DiscontinuousSpace);
};

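// A continuous space whose backing storage is provided by a MemMap.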
class MemMapSpace : public ContinuousSpace {
 public:
  // Size of the space without a limit on its growth. By default this is just the Capacity, but
  // for the allocation space we support starting with a small heap and then extending it.
  virtual size_t NonGrowthLimitCapacity() const {
    return Capacity();
  }

  MemMap* GetMemMap() {
    return mem_map_.get();
  }

  const MemMap* GetMemMap() const {
    return mem_map_.get();
  }

  MemMap* ReleaseMemMap() {
    return mem_map_.release();
  }

 protected:
  MemMapSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end, byte* limit,
              GcRetentionPolicy gc_retention_policy)
      : ContinuousSpace(name, gc_retention_policy, begin, end, limit),
        mem_map_(mem_map) {
  }

  // Underlying storage of the space.
  UniquePtr<MemMap> mem_map_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MemMapSpace);
};

// Used by the heap compaction interface to enable copying from one type of alloc space to another.
class ContinuousMemMapAllocSpace : public MemMapSpace, public AllocSpace {
 public:
  virtual bool IsAllocSpace() const {
    return true;
  }
  virtual AllocSpace* AsAllocSpace() {
    return this;
  }

  virtual bool IsContinuousMemMapAllocSpace() const {
    return true;
  }
  virtual ContinuousMemMapAllocSpace* AsContinuousMemMapAllocSpace() {
    return this;
  }

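  // Bitmap binding points the mark bitmap at the live bitmap (stashing the original mark bitmap
  // in temp_bitmap_) so everything live is treated as marked; UnBindBitmaps restores it.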
  bool HasBoundBitmaps() const EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void BindLiveToMarkBitmap() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void UnBindBitmaps() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  // Swap the live and mark bitmaps of this space. This is used by the GC for concurrent sweeping.
  void SwapBitmaps();

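  // Reset the space to an empty state. Unimplemented here; overridden by spaces that support
  // clearing.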
  virtual void Clear() {
    LOG(FATAL) << "Unimplemented";
  }

  virtual accounting::SpaceBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }
  virtual accounting::SpaceBitmap* GetMarkBitmap() const {
    return mark_bitmap_.get();
  }

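  // Sweep the objects in this space that are not marked, reporting the freed object and byte
  // counts through the output parameters.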
  virtual void Sweep(bool swap_bitmaps, size_t* freed_objects, size_t* freed_bytes);
  virtual accounting::SpaceBitmap::SweepCallback* GetSweepCallback() {
    LOG(FATAL) << "Unimplemented";
    return nullptr;
  }

 protected:
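  // Live and mark bitmaps for the space; temp_bitmap_ is used while the bitmaps are bound
  // (see BindLiveToMarkBitmap above).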
  UniquePtr<accounting::SpaceBitmap> live_bitmap_;
  UniquePtr<accounting::SpaceBitmap> mark_bitmap_;
  UniquePtr<accounting::SpaceBitmap> temp_bitmap_;

  ContinuousMemMapAllocSpace(const std::string& name, MemMap* mem_map, byte* begin,
                             byte* end, byte* limit, GcRetentionPolicy gc_retention_policy)
      : MemMapSpace(name, mem_map, begin, end, limit, gc_retention_policy) {
  }

 private:
  friend class gc::Heap;
  DISALLOW_COPY_AND_ASSIGN(ContinuousMemMapAllocSpace);
};

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_SPACE_H_