/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_GC_LARGE_OBJECT_SPACE_H_
#define ART_SRC_GC_LARGE_OBJECT_SPACE_H_

#include "space.h"
#include "safe_map.h"

#include <set>
#include <vector>

namespace art {
class SpaceSetMap;

Ian Rogers22a20862013-03-16 16:34:57 -070029// Abstraction implemented by all large object spaces.
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -070030class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace {
31 public:
32 virtual bool CanAllocateInto() const {
33 return true;
34 }
35
36 virtual bool IsCompactible() const {
37 return true;
38 }
39
40 virtual SpaceType GetType() const {
41 return kSpaceTypeLargeObjectSpace;
42 }
43
44 virtual SpaceSetMap* GetLiveObjects() const {
45 return live_objects_.get();
46 }
47
48 virtual SpaceSetMap* GetMarkObjects() const {
49 return mark_objects_.get();
50 }
51
52 virtual void SwapBitmaps();
53 virtual void CopyLiveToMarked();
54 virtual void Walk(DlMallocSpace::WalkCallback, void* arg) = 0;
55 virtual ~LargeObjectSpace() {}
56
57 uint64_t GetNumBytesAllocated() const {
58 return num_bytes_allocated_;
59 }
60
61 uint64_t GetNumObjectsAllocated() const {
62 return num_objects_allocated_;
63 }
64
65 uint64_t GetTotalBytesAllocated() const {
66 return total_bytes_allocated_;
67 }
68
69 uint64_t GetTotalObjectsAllocated() const {
70 return total_objects_allocated_;
71 }
72
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080073 size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs);
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -070074
75 protected:
76
77 LargeObjectSpace(const std::string& name);
78
79 // Approximate number of bytes which have been allocated into the space.
80 size_t num_bytes_allocated_;
81 size_t num_objects_allocated_;
82 size_t total_bytes_allocated_;
83 size_t total_objects_allocated_;
84
85 UniquePtr<SpaceSetMap> live_objects_;
86 UniquePtr<SpaceSetMap> mark_objects_;
87
88 friend class Space;
89};
90
Ian Rogers22a20862013-03-16 16:34:57 -070091// A discontinuous large object space implemented by individual mmap/munmap calls.
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -070092class LargeObjectMapSpace : public LargeObjectSpace {
93 public:
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -070094 // Creates a large object space. Allocations into the large object space use memory maps instead
95 // of malloc.
96 static LargeObjectMapSpace* Create(const std::string& name);
97
98 // Return the storage space required by obj.
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080099 virtual size_t AllocationSize(const mirror::Object* obj);
100 virtual mirror::Object* Alloc(Thread* self, size_t num_bytes);
101 size_t Free(Thread* self, mirror::Object* ptr);
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700102 virtual void Walk(DlMallocSpace::WalkCallback, void* arg);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800103 virtual bool Contains(const mirror::Object* obj) const;
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700104private:
105 LargeObjectMapSpace(const std::string& name);
106 virtual ~LargeObjectMapSpace() {}
107
108 // Used to ensure mutual exclusion when the allocation spaces data structures are being modified.
Ian Rogers22a20862013-03-16 16:34:57 -0700109 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
110 std::vector<mirror::Object*> large_objects_ GUARDED_BY(lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800111 typedef SafeMap<mirror::Object*, MemMap*> MemMaps;
Ian Rogers22a20862013-03-16 16:34:57 -0700112 MemMaps mem_maps_ GUARDED_BY(lock_);
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700113};
114
Ian Rogers22a20862013-03-16 16:34:57 -0700115// A continuous large object space with a free-list to handle holes.
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700116class FreeListSpace : public LargeObjectSpace {
117 public:
118 virtual ~FreeListSpace();
119 static FreeListSpace* Create(const std::string& name, byte* requested_begin, size_t capacity);
120
Ian Rogers22a20862013-03-16 16:34:57 -0700121 size_t AllocationSize(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800122 mirror::Object* Alloc(Thread* self, size_t num_bytes);
123 size_t Free(Thread* self, mirror::Object* obj);
124 bool Contains(const mirror::Object* obj) const;
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700125 void Walk(DlMallocSpace::WalkCallback callback, void* arg);
126
Ian Rogers22a20862013-03-16 16:34:57 -0700127 // Address at which the space begins.
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700128 byte* Begin() const {
129 return begin_;
130 }
131
132 // Address at which the space ends, which may vary as the space is filled.
133 byte* End() const {
134 return end_;
135 }
136
137 // Current size of space
138 size_t Size() const {
139 return End() - Begin();
140 }
Mathieu Chartier128c52c2012-10-16 14:12:41 -0700141
142 virtual void Dump(std::ostream& os) const;
143
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700144 private:
145 static const size_t kAlignment = kPageSize;
146
147 class Chunk {
148 public:
149 static const size_t kFreeFlag = 0x80000000;
150
151 struct SortBySize {
152 bool operator()(const Chunk* a, const Chunk* b) const {
153 return a->GetSize() < b->GetSize();
154 }
155 };
156
157 bool IsFree() const {
158 return (m_size & kFreeFlag) != 0;
159 }
160
161 void SetSize(size_t size, bool is_free = false) {
162 m_size = size | (is_free ? kFreeFlag : 0);
163 }
164
165 size_t GetSize() const {
166 return m_size & (kFreeFlag - 1);
167 }
168
169 Chunk* GetPrevious() {
170 return m_previous;
171 }
172
173 void SetPrevious(Chunk* previous) {
174 m_previous = previous;
175 DCHECK(m_previous == NULL ||
176 (m_previous != NULL && m_previous + m_previous->GetSize() / kAlignment == this));
177 }
178 private:
179 size_t m_size;
180 Chunk* m_previous;
181 };
182
183 FreeListSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end);
Ian Rogers22a20862013-03-16 16:34:57 -0700184 void AddFreeChunk(void* address, size_t size, Chunk* previous) EXCLUSIVE_LOCKS_REQUIRED(lock_);
185 Chunk* ChunkFromAddr(void* address) EXCLUSIVE_LOCKS_REQUIRED(lock_);
186 void* AddrFromChunk(Chunk* chunk) EXCLUSIVE_LOCKS_REQUIRED(lock_);
187 void RemoveFreeChunk(Chunk* chunk) EXCLUSIVE_LOCKS_REQUIRED(lock_);
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700188 Chunk* GetNextChunk(Chunk* chunk);
189
190 typedef std::multiset<Chunk*, Chunk::SortBySize> FreeChunks;
Ian Rogers22a20862013-03-16 16:34:57 -0700191 byte* const begin_;
192 byte* const end_;
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700193 UniquePtr<MemMap> mem_map_;
Ian Rogers22a20862013-03-16 16:34:57 -0700194 Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
195 std::vector<Chunk> chunks_ GUARDED_BY(lock_);
196 FreeChunks free_chunks_ GUARDED_BY(lock_);
Mathieu Chartier1c23e1e2012-10-12 14:14:11 -0700197};
198
}  // namespace art

#endif  // ART_SRC_GC_LARGE_OBJECT_SPACE_H_