/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_SPACE_H_
#define ART_RUNTIME_GC_SPACE_SPACE_H_

#include <memory>
#include <string>

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc/accounting/space_bitmap.h"
#include "gc/collector/garbage_collector.h"
#include "globals.h"
#include "image.h"
#include "mem_map.h"

namespace art {
namespace mirror {
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace space {

class AllocSpace;
class BumpPointerSpace;
class ContinuousMemMapAllocSpace;
class ContinuousSpace;
class DiscontinuousSpace;
class MallocSpace;
class DlMallocSpace;
class RosAllocSpace;
class ImageSpace;
class LargeObjectSpace;
class ZygoteSpace;

static constexpr bool kDebugSpaces = kIsDebugBuild;

// See Space::GetGcRetentionPolicy.
enum GcRetentionPolicy {
  // Objects are retained forever with this policy for a space.
  kGcRetentionPolicyNeverCollect,
  // Every GC cycle will attempt to collect objects in this space.
  kGcRetentionPolicyAlwaysCollect,
  // Objects will be considered for collection only in "full" GC cycles, i.e. the faster partial
  // collections won't collect these areas, such as the Zygote space.
  kGcRetentionPolicyFullCollect,
};
std::ostream& operator<<(std::ostream& os, const GcRetentionPolicy& policy);

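// Illustrative sketch only (hypothetical helper, not part of this header): one way a caller could
// turn the retention policy into a "should this space be collected in this cycle?" decision,
// following the enum comments above.
//
//   bool ShouldCollect(GcRetentionPolicy policy, bool full_gc) {
//     switch (policy) {
//       case kGcRetentionPolicyNeverCollect:  return false;    // Retained forever.
//       case kGcRetentionPolicyAlwaysCollect: return true;     // Collected every cycle.
//       case kGcRetentionPolicyFullCollect:   return full_gc;  // Only collected by full GCs.
//     }
//     return false;
//   }
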
enum SpaceType {
  kSpaceTypeImageSpace,
  kSpaceTypeMallocSpace,
  kSpaceTypeZygoteSpace,
  kSpaceTypeBumpPointerSpace,
  kSpaceTypeLargeObjectSpace,
};
std::ostream& operator<<(std::ostream& os, const SpaceType& space_type);

// A space contains memory allocated for managed objects.
class Space {
 public:
  // Dump space. Also key method for C++ vtables.
  virtual void Dump(std::ostream& os) const;

  // Name of the space. May vary, for example before/after the Zygote fork.
  const char* GetName() const {
    return name_.c_str();
  }

  // The policy governing when objects in this space are collected.
  GcRetentionPolicy GetGcRetentionPolicy() const {
    return gc_retention_policy_;
  }

  // Is the given object contained within this space?
  virtual bool Contains(const mirror::Object* obj) const = 0;

  // The kind of space this is: image, malloc, zygote, bump pointer, or large object.
  virtual SpaceType GetType() const = 0;

  // Is this an image space, i.e. one backed by a memory mapped image file.
  bool IsImageSpace() const {
    return GetType() == kSpaceTypeImageSpace;
  }
  ImageSpace* AsImageSpace();

  // Is this a malloc-backed allocation space (dlmalloc or rosalloc)?
  bool IsMallocSpace() const {
    SpaceType type = GetType();
    return type == kSpaceTypeMallocSpace;
  }
  MallocSpace* AsMallocSpace();

  virtual bool IsDlMallocSpace() const {
    return false;
  }
  virtual DlMallocSpace* AsDlMallocSpace();

  virtual bool IsRosAllocSpace() const {
    return false;
  }
  virtual RosAllocSpace* AsRosAllocSpace();

  // Is this the space allocated into by the Zygote and no longer in use for allocation?
  bool IsZygoteSpace() const {
    return GetType() == kSpaceTypeZygoteSpace;
  }
  virtual ZygoteSpace* AsZygoteSpace();

  // Is this space a bump pointer space?
  bool IsBumpPointerSpace() const {
    return GetType() == kSpaceTypeBumpPointerSpace;
  }
  virtual BumpPointerSpace* AsBumpPointerSpace();

  // Does this space hold large objects and implement the large object space abstraction?
  bool IsLargeObjectSpace() const {
    return GetType() == kSpaceTypeLargeObjectSpace;
  }
  LargeObjectSpace* AsLargeObjectSpace();

  virtual bool IsContinuousSpace() const {
    return false;
  }
  ContinuousSpace* AsContinuousSpace();

  virtual bool IsDiscontinuousSpace() const {
    return false;
  }
  DiscontinuousSpace* AsDiscontinuousSpace();

  virtual bool IsAllocSpace() const {
    return false;
  }
  virtual AllocSpace* AsAllocSpace();

  virtual bool IsContinuousMemMapAllocSpace() const {
    return false;
  }
  virtual ContinuousMemMapAllocSpace* AsContinuousMemMapAllocSpace();

  // Returns true if objects in the space are movable.
  virtual bool CanMoveObjects() const = 0;

  virtual ~Space() {}

 protected:
  Space(const std::string& name, GcRetentionPolicy gc_retention_policy);

  void SetGcRetentionPolicy(GcRetentionPolicy gc_retention_policy) {
    gc_retention_policy_ = gc_retention_policy;
  }

  // Name of the space that may vary due to the Zygote fork.
  std::string name_;

 protected:
  // When should objects within this space be reclaimed? Not constant as we vary it in the case
  // of Zygote forking.
  GcRetentionPolicy gc_retention_policy_;

 private:
  friend class art::gc::Heap;
  DISALLOW_COPY_AND_ASSIGN(Space);
};
std::ostream& operator<<(std::ostream& os, const Space& space);

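// Illustrative sketch only (hypothetical helper, not part of this header): the Is*/As* pairs above
// are how callers downcast a generic Space* once its concrete kind has been checked.
//
//   void DescribeSpace(Space* space, std::ostream& os) {
//     if (space->IsImageSpace()) {
//       ImageSpace* image_space = space->AsImageSpace();
//       image_space->Dump(os);
//     } else if (space->IsContinuousSpace()) {
//       ContinuousSpace* continuous_space = space->AsContinuousSpace();
//       os << space->GetName() << " size=" << continuous_space->Size() << "\n";
//     }
//   }
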
// AllocSpace interface.
class AllocSpace {
 public:
  // Number of bytes currently allocated.
  virtual uint64_t GetBytesAllocated() = 0;
  // Number of objects currently allocated.
  virtual uint64_t GetObjectsAllocated() = 0;

  // Allocate num_bytes without allowing growth. If the allocation
  // succeeds, the output parameter bytes_allocated will be set to the
  // number of bytes actually allocated, which is >= num_bytes.
  // Alloc can be called from multiple threads at the same time and must be thread-safe.
  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                size_t* usable_size) = 0;

  // Thread-unsafe allocation for when mutators are suspended, used by the semispace collector.
  virtual mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                            size_t* usable_size)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return Alloc(self, num_bytes, bytes_allocated, usable_size);
  }

  // Return the storage space required by obj.
  virtual size_t AllocationSize(mirror::Object* obj, size_t* usable_size) = 0;

  // Returns how many bytes were freed.
  virtual size_t Free(Thread* self, mirror::Object* ptr) = 0;

  // Returns how many bytes were freed.
  virtual size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs) = 0;

  // Revoke any sort of thread-local buffers that are used to speed up allocations for the given
  // thread, if the alloc space implementation uses any.
  virtual void RevokeThreadLocalBuffers(Thread* thread) = 0;

  // Revoke any sort of thread-local buffers that are used to speed up allocations for all the
  // threads, if the alloc space implementation uses any.
  virtual void RevokeAllThreadLocalBuffers() = 0;

  virtual void LogFragmentationAllocFailure(std::ostream& os, size_t failed_alloc_bytes) = 0;

 protected:
  struct SweepCallbackContext {
    SweepCallbackContext(bool swap_bitmaps, space::Space* space);
    const bool swap_bitmaps;
    space::Space* const space;
    Thread* const self;
    collector::ObjectBytePair freed;
  };

  AllocSpace() {}
  virtual ~AllocSpace() {}

 private:
  DISALLOW_COPY_AND_ASSIGN(AllocSpace);
};

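// Illustrative sketch only (hypothetical helper, not part of this header): how the Alloc contract
// described above is typically used, with the out-parameters receiving the actual allocation size.
//
//   mirror::Object* TryAlloc(AllocSpace* space, Thread* self, size_t num_bytes) {
//     size_t bytes_allocated = 0;
//     size_t usable_size = 0;
//     mirror::Object* obj = space->Alloc(self, num_bytes, &bytes_allocated, &usable_size);
//     if (obj != nullptr) {
//       DCHECK_GE(bytes_allocated, num_bytes);  // Guaranteed by the Alloc contract above.
//     }
//     return obj;
//   }
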
// Continuous spaces have bitmaps, and an address range. Although not required, objects within
// continuous spaces can be marked in the card table.
class ContinuousSpace : public Space {
 public:
  // Address at which the space begins.
  byte* Begin() const {
    return begin_;
  }

  // Current address at which the space ends, which may vary as the space is filled.
  byte* End() const {
    return end_.LoadRelaxed();
  }

  // The end of the address range covered by the space.
  byte* Limit() const {
    return limit_;
  }

  // Change the end of the space. Be careful with use since changing the end of a space to an
  // invalid value may break the GC.
  void SetEnd(byte* end) {
    end_.StoreRelaxed(end);
  }

  void SetLimit(byte* limit) {
    limit_ = limit;
  }

  // Current size of the space.
  size_t Size() const {
    return End() - Begin();
  }

  virtual accounting::ContinuousSpaceBitmap* GetLiveBitmap() const = 0;
  virtual accounting::ContinuousSpaceBitmap* GetMarkBitmap() const = 0;

  // Maximum size to which the mapped space can grow.
  virtual size_t Capacity() const {
    return Limit() - Begin();
  }

  // Is the object within this space? We check whether the pointer is beyond the end first, as
  // continuous spaces are iterated over from low to high.
  bool HasAddress(const mirror::Object* obj) const {
    const byte* byte_ptr = reinterpret_cast<const byte*>(obj);
    return byte_ptr >= Begin() && byte_ptr < Limit();
  }

  bool Contains(const mirror::Object* obj) const {
    return HasAddress(obj);
  }

  virtual bool IsContinuousSpace() const {
    return true;
  }

  virtual ~ContinuousSpace() {}

 protected:
  ContinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy,
                  byte* begin, byte* end, byte* limit) :
      Space(name, gc_retention_policy), begin_(begin), end_(end), limit_(limit) {
  }

  // The beginning of the storage for fast access.
  byte* begin_;

  // Current end of the space.
  Atomic<byte*> end_;

  // Limit of the space.
  byte* limit_;

 private:
  DISALLOW_COPY_AND_ASSIGN(ContinuousSpace);
};

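// Illustrative sketch only (hypothetical helper, not part of this header): the accessors above are
// expected to satisfy Begin() <= End() <= Limit(), with Size() == End() - Begin() and
// Capacity() == Limit() - Begin().
//
//   bool RangeLooksConsistent(const ContinuousSpace& space) {
//     return space.Begin() <= space.End() &&
//            space.End() <= space.Limit() &&
//            space.Size() <= space.Capacity();
//   }
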
// A space where objects may be allocated higgledy-piggledy throughout virtual memory. Currently
// the card table can't cover these objects and so the write barrier shouldn't be triggered. This
// is suitable for use for large primitive arrays.
class DiscontinuousSpace : public Space {
 public:
  accounting::LargeObjectBitmap* GetLiveBitmap() const {
    return live_bitmap_.get();
  }

  accounting::LargeObjectBitmap* GetMarkBitmap() const {
    return mark_bitmap_.get();
  }

  virtual bool IsDiscontinuousSpace() const OVERRIDE {
    return true;
  }

  virtual ~DiscontinuousSpace() {}

 protected:
  DiscontinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy);

  std::unique_ptr<accounting::LargeObjectBitmap> live_bitmap_;
  std::unique_ptr<accounting::LargeObjectBitmap> mark_bitmap_;

 private:
  DISALLOW_COPY_AND_ASSIGN(DiscontinuousSpace);
};

class MemMapSpace : public ContinuousSpace {
 public:
  // Size of the space without a limit on its growth. By default this is just the Capacity, but
  // for the allocation space we support starting with a small heap and then extending it.
  virtual size_t NonGrowthLimitCapacity() const {
    return Capacity();
  }

  MemMap* GetMemMap() {
    return mem_map_.get();
  }

  const MemMap* GetMemMap() const {
    return mem_map_.get();
  }

  MemMap* ReleaseMemMap() {
    return mem_map_.release();
  }

 protected:
  MemMapSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end, byte* limit,
              GcRetentionPolicy gc_retention_policy)
      : ContinuousSpace(name, gc_retention_policy, begin, end, limit),
        mem_map_(mem_map) {
  }

  // Underlying storage of the space
  std::unique_ptr<MemMap> mem_map_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MemMapSpace);
};

// Used by the heap compaction interface to enable copying from one type of alloc space to another.
class ContinuousMemMapAllocSpace : public MemMapSpace, public AllocSpace {
 public:
  bool IsAllocSpace() const OVERRIDE {
    return true;
  }
  AllocSpace* AsAllocSpace() OVERRIDE {
    return this;
  }

  bool IsContinuousMemMapAllocSpace() const OVERRIDE {
    return true;
  }
  ContinuousMemMapAllocSpace* AsContinuousMemMapAllocSpace() {
    return this;
  }

  bool HasBoundBitmaps() const EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void BindLiveToMarkBitmap()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void UnBindBitmaps() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  // Swap the live and mark bitmaps of this space. This is used by the GC for concurrent sweeping.
  void SwapBitmaps();

  // Clear the space back to an empty space.
  virtual void Clear() = 0;

  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const OVERRIDE {
    return live_bitmap_.get();
  }

  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const OVERRIDE {
    return mark_bitmap_.get();
  }

  collector::ObjectBytePair Sweep(bool swap_bitmaps);
  virtual accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() = 0;

 protected:
  std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap_;
  std::unique_ptr<accounting::ContinuousSpaceBitmap> mark_bitmap_;
  std::unique_ptr<accounting::ContinuousSpaceBitmap> temp_bitmap_;

  ContinuousMemMapAllocSpace(const std::string& name, MemMap* mem_map, byte* begin,
                             byte* end, byte* limit, GcRetentionPolicy gc_retention_policy)
      : MemMapSpace(name, mem_map, begin, end, limit, gc_retention_policy) {
  }

 private:
  friend class gc::Heap;
  DISALLOW_COPY_AND_ASSIGN(ContinuousMemMapAllocSpace);
};

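// Illustrative sketch only (hypothetical usage, not part of this header): a collector that has
// finished marking a ContinuousMemMapAllocSpace can sweep it and inspect what was reclaimed;
// whether the live and mark bitmaps are swapped during the sweep is controlled by swap_bitmaps.
//
//   void SweepSpace(ContinuousMemMapAllocSpace* space, bool swap_bitmaps) {
//     collector::ObjectBytePair freed = space->Sweep(swap_bitmaps);
//     // 'freed' records how many objects and bytes the sweep reclaimed.
//   }
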
}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_SPACE_H_