/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_SPACE_H_
#define ART_RUNTIME_GC_SPACE_SPACE_H_

#include <string>

#include "UniquePtr.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc/accounting/space_bitmap.h"
#include "globals.h"
#include "image.h"
#include "mem_map.h"

namespace art {
namespace mirror {
  class Object;
}  // namespace mirror

namespace gc {

namespace accounting {
  class SpaceBitmap;
}  // namespace accounting

class Heap;

namespace space {

class AllocSpace;
class ContinuousSpace;
class DlMallocSpace;
class DiscontinuousSpace;
class ImageSpace;
class LargeObjectSpace;

static constexpr bool kDebugSpaces = kIsDebugBuild;

// See Space::GetGcRetentionPolicy.
enum GcRetentionPolicy {
  // Objects are retained forever with this policy for a space.
  kGcRetentionPolicyNeverCollect,
  // Every GC cycle will attempt to collect objects in this space.
  kGcRetentionPolicyAlwaysCollect,
  // Objects will be considered for collection only in "full" GC cycles, i.e. faster partial
  // collections won't scan areas such as the Zygote space.
  kGcRetentionPolicyFullCollect,
};
std::ostream& operator<<(std::ostream& os, const GcRetentionPolicy& policy);
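
// Illustrative sketch only (hypothetical collector logic, not part of this header):
// a collector can consult the retention policy to decide whether to visit a space
// during a given collection, e.g.
//   GcRetentionPolicy policy = space->GetGcRetentionPolicy();
//   if (policy == kGcRetentionPolicyAlwaysCollect ||
//       (is_full_gc && policy == kGcRetentionPolicyFullCollect)) {
//     // Collect this space; kGcRetentionPolicyNeverCollect spaces are always skipped.
//   }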

enum SpaceType {
  kSpaceTypeImageSpace,
  kSpaceTypeAllocSpace,
  kSpaceTypeZygoteSpace,
  kSpaceTypeBumpPointerSpace,
  kSpaceTypeLargeObjectSpace,
};
std::ostream& operator<<(std::ostream& os, const SpaceType& space_type);

// A space contains memory allocated for managed objects.
class Space {
 public:
  // Dump space. Also key method for C++ vtables.
  virtual void Dump(std::ostream& os) const;

  // Name of the space. May vary, for example before/after the Zygote fork.
  const char* GetName() const {
    return name_.c_str();
  }

  // The policy governing when objects associated with this space are collected.
  GcRetentionPolicy GetGcRetentionPolicy() const {
    return gc_retention_policy_;
  }

  // Does the space support allocation?
  virtual bool CanAllocateInto() const {
    return true;
  }

  // Is the given object contained within this space?
  virtual bool Contains(const mirror::Object* obj) const = 0;

  // The kind of space this is: image, alloc, zygote, or large object.
  virtual SpaceType GetType() const = 0;

  // Is this an image space, i.e. one backed by a memory-mapped image file?
  bool IsImageSpace() const {
    return GetType() == kSpaceTypeImageSpace;
  }
  ImageSpace* AsImageSpace();

  // Is this a dlmalloc-backed allocation space?
  bool IsDlMallocSpace() const {
    SpaceType type = GetType();
    return type == kSpaceTypeAllocSpace || type == kSpaceTypeZygoteSpace;
  }
  DlMallocSpace* AsDlMallocSpace();

  // Is this the space allocated into by the Zygote and no longer in use?
  bool IsZygoteSpace() const {
    return GetType() == kSpaceTypeZygoteSpace;
  }

  // Is this space a bump pointer space?
  bool IsBumpPointerSpace() const {
    return GetType() == kSpaceTypeBumpPointerSpace;
  }

  // Does this space hold large objects and implement the large object space abstraction?
  bool IsLargeObjectSpace() const {
    return GetType() == kSpaceTypeLargeObjectSpace;
  }
  LargeObjectSpace* AsLargeObjectSpace();

  virtual bool IsContinuousSpace() const {
    return false;
  }
  ContinuousSpace* AsContinuousSpace();

  virtual bool IsDiscontinuousSpace() const {
    return false;
  }
  DiscontinuousSpace* AsDiscontinuousSpace();

  virtual bool IsAllocSpace() const {
    return false;
  }
  virtual AllocSpace* AsAllocSpace() {
    LOG(FATAL) << "Unimplemented";
    return nullptr;
  }
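
  // Illustrative sketch (hypothetical caller code, not part of this class): the
  // Is*/As* helpers above are intended for checked downcasts, e.g.
  //   if (space->IsDlMallocSpace()) {
  //     DlMallocSpace* dlmalloc_space = space->AsDlMallocSpace();
  //     // ... use the dlmalloc-specific allocation interface ...
  //   }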

  virtual ~Space() {}

 protected:
  Space(const std::string& name, GcRetentionPolicy gc_retention_policy);

  void SetGcRetentionPolicy(GcRetentionPolicy gc_retention_policy) {
    gc_retention_policy_ = gc_retention_policy;
  }

  // Name of the space that may vary due to the Zygote fork.
  std::string name_;

 protected:
  // When should objects within this space be reclaimed? Not constant as we vary it in the case
  // of Zygote forking.
  GcRetentionPolicy gc_retention_policy_;

 private:
  friend class art::gc::Heap;
  DISALLOW_COPY_AND_ASSIGN(Space);
};
std::ostream& operator<<(std::ostream& os, const Space& space);

// AllocSpace interface.
class AllocSpace {
 public:
  // Number of bytes currently allocated.
  virtual uint64_t GetBytesAllocated() = 0;
  // Number of objects currently allocated.
  virtual uint64_t GetObjectsAllocated() = 0;
  // Number of bytes allocated since the space was created.
  virtual uint64_t GetTotalBytesAllocated() = 0;
  // Number of objects allocated since the space was created.
  virtual uint64_t GetTotalObjectsAllocated() = 0;

  // Allocate num_bytes without allowing growth. If the allocation
  // succeeds, the output parameter bytes_allocated will be set to the
  // number of bytes actually allocated, which is >= num_bytes.
  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated) = 0;
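
  // Illustrative usage sketch (hypothetical caller; assumes allocation failure is
  // signalled by a null return, which this header does not itself specify):
  //   size_t bytes_allocated = 0;
  //   mirror::Object* obj = alloc_space->Alloc(self, num_bytes, &bytes_allocated);
  //   if (obj != nullptr) {
  //     DCHECK_GE(bytes_allocated, num_bytes);  // Per the contract documented above.
  //   }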

  // Return the storage space required by obj.
  virtual size_t AllocationSize(const mirror::Object* obj) = 0;

  // Returns how many bytes were freed.
  virtual size_t Free(Thread* self, mirror::Object* ptr) = 0;

  // Returns how many bytes were freed.
  virtual size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) = 0;

 protected:
  AllocSpace() {}
  virtual ~AllocSpace() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(AllocSpace);
};

// Continuous spaces have bitmaps, and an address range. Although not required, objects within
// continuous spaces can be marked in the card table.
class ContinuousSpace : public Space {
 public:
  // Address at which the space begins.
  byte* Begin() const {
    return begin_;
  }

  // Current address at which the space ends, which may vary as the space is filled.
  byte* End() const {
    return end_;
  }

  // The end of the address range covered by the space.
  byte* Limit() const {
    return limit_;
  }

  // Change the end of the space. Be careful with use since changing the end of a space to an
  // invalid value may break the GC.
  void SetEnd(byte* end) {
    end_ = end;
  }

  void SetLimit(byte* limit) {
    limit_ = limit;
  }

  // Current size of the space.
  size_t Size() const {
    return End() - Begin();
  }

  virtual accounting::SpaceBitmap* GetLiveBitmap() const = 0;
  virtual accounting::SpaceBitmap* GetMarkBitmap() const = 0;

  // Maximum size to which the mapped space can grow.
  virtual size_t Capacity() const {
    return Limit() - Begin();
  }
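
  // Relationship between the accessors above (summary; End() is expected to stay
  // within [Begin(), Limit()] as the space fills):
  //   Size()     == End()   - Begin()   // Bytes currently used by the space.
  //   Capacity() == Limit() - Begin()   // Maximum size the space can grow to.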

  // Is the given object within this space? An object is in the space if its address
  // lies in the mapped range [Begin(), Limit()).
  bool HasAddress(const mirror::Object* obj) const {
    const byte* byte_ptr = reinterpret_cast<const byte*>(obj);
    return byte_ptr >= Begin() && byte_ptr < Limit();
  }

  bool Contains(const mirror::Object* obj) const {
    return HasAddress(obj);
  }

  virtual bool IsContinuousSpace() const {
    return true;
  }

  virtual ~ContinuousSpace() {}

 protected:
  ContinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy,
                  byte* begin, byte* end, byte* limit) :
      Space(name, gc_retention_policy), begin_(begin), end_(end), limit_(limit) {
  }

  // The beginning of the storage for fast access.
  byte* begin_;

  // Current end of the space.
  byte* volatile end_;

  // Limit of the space.
  byte* limit_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ContinuousSpace);
};

// A space where objects may be allocated higgledy-piggledy throughout virtual memory. Currently
// the card table can't cover these objects, so the write barrier shouldn't be triggered. This
// is suitable for large primitive arrays.
class DiscontinuousSpace : public Space {
 public:
  accounting::SpaceSetMap* GetLiveObjects() const {
    return live_objects_.get();
  }

  accounting::SpaceSetMap* GetMarkObjects() const {
    return mark_objects_.get();
  }

  virtual bool IsDiscontinuousSpace() const {
    return true;
  }

  virtual ~DiscontinuousSpace() {}

 protected:
  DiscontinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy);

  UniquePtr<accounting::SpaceSetMap> live_objects_;
  UniquePtr<accounting::SpaceSetMap> mark_objects_;

 private:
  DISALLOW_COPY_AND_ASSIGN(DiscontinuousSpace);
};

class MemMapSpace : public ContinuousSpace {
 public:
  // Size of the space without a limit on its growth. By default this is just the Capacity, but
  // for the allocation space we support starting with a small heap and then extending it.
  virtual size_t NonGrowthLimitCapacity() const {
    return Capacity();
  }
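
  // Illustrative example (hypothetical numbers): an allocation space might reserve a
  // large mapping, say 256 MiB (NonGrowthLimitCapacity()), while Capacity() reports a
  // smaller current growth limit that is raised as the application needs more memory.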

  MemMap* GetMemMap() {
    return mem_map_.get();
  }

  const MemMap* GetMemMap() const {
    return mem_map_.get();
  }

 protected:
  MemMapSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end, byte* limit,
              GcRetentionPolicy gc_retention_policy)
      : ContinuousSpace(name, gc_retention_policy, begin, end, limit),
        mem_map_(mem_map) {
  }

  // Underlying storage of the space
  UniquePtr<MemMap> mem_map_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MemMapSpace);
};

// Used by the heap compaction interface to enable copying from one type of alloc space to another.
class ContinuousMemMapAllocSpace : public MemMapSpace, public AllocSpace {
 public:
  virtual bool IsAllocSpace() const {
    return true;
  }

  virtual AllocSpace* AsAllocSpace() {
    return this;
  }

  virtual void Clear() {
    LOG(FATAL) << "Unimplemented";
  }

 protected:
  ContinuousMemMapAllocSpace(const std::string& name, MemMap* mem_map, byte* begin,
                             byte* end, byte* limit, GcRetentionPolicy gc_retention_policy)
      : MemMapSpace(name, mem_map, begin, end, limit, gc_retention_policy) {
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(ContinuousMemMapAllocSpace);
};

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_SPACE_H_