/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_
#define ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_

#include "gc/accounting/gc_allocator.h"
#include "dlmalloc_space.h"
#include "safe_map.h"
#include "space.h"

#include <set>
#include <vector>

namespace art {
namespace gc {
namespace space {

// Abstraction implemented by all large object spaces.
class LargeObjectSpace : public DiscontinuousSpace, public AllocSpace {
 public:
  virtual SpaceType GetType() const {
    return kSpaceTypeLargeObjectSpace;
  }

  virtual void SwapBitmaps();
  virtual void CopyLiveToMarked();
  virtual void Walk(DlMallocSpace::WalkCallback, void* arg) = 0;
  virtual ~LargeObjectSpace() {}

  uint64_t GetBytesAllocated() {
    return num_bytes_allocated_;
  }

  uint64_t GetObjectsAllocated() {
    return num_objects_allocated_;
  }

  uint64_t GetTotalBytesAllocated() {
    return total_bytes_allocated_;
  }

  uint64_t GetTotalObjectsAllocated() {
    return total_objects_allocated_;
  }

  size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs);

  virtual bool IsAllocSpace() const {
    return true;
  }

  virtual AllocSpace* AsAllocSpace() {
    return this;
  }

  virtual void Sweep(bool swap_bitmaps, size_t* freed_objects, size_t* freed_bytes);

 protected:
  explicit LargeObjectSpace(const std::string& name);

  // Approximate numbers of bytes and objects currently allocated in the space, plus the
  // cumulative totals over the lifetime of the space.
  size_t num_bytes_allocated_;
  size_t num_objects_allocated_;
  size_t total_bytes_allocated_;
  size_t total_objects_allocated_;

  friend class Space;

 private:
  DISALLOW_COPY_AND_ASSIGN(LargeObjectSpace);
};

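// A minimal usage sketch, kept in comment form so the header itself is unchanged. The `los`
// pointer is a hypothetical handle to some concrete large object space owned by the heap, and
// is not a name defined in this header:
//
//   LargeObjectSpace* los = ...;
//   uint64_t live_bytes = los->GetBytesAllocated();
//   size_t freed_objects = 0;
//   size_t freed_bytes = 0;
//   los->Sweep(/* swap_bitmaps */ false, &freed_objects, &freed_bytes);
//   uint64_t total_bytes = los->GetTotalBytesAllocated();  // Cumulative over the space's lifetime.
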
// A discontinuous large object space implemented by individual mmap/munmap calls.
class LargeObjectMapSpace : public LargeObjectSpace {
 public:
  // Creates a large object space. Allocations into the large object space use memory maps instead
  // of malloc.
  static LargeObjectMapSpace* Create(const std::string& name);

  // Returns the storage space required by obj.
  size_t AllocationSize(const mirror::Object* obj);
  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
  size_t Free(Thread* self, mirror::Object* ptr);
  void Walk(DlMallocSpace::WalkCallback, void* arg) LOCKS_EXCLUDED(lock_);
  // TODO: disabling thread safety analysis as this may be called when we already hold lock_.
  bool Contains(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS;

 private:
  explicit LargeObjectMapSpace(const std::string& name);
  virtual ~LargeObjectMapSpace() {}

  // Used to ensure mutual exclusion when the allocation space's data structures are modified.
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<mirror::Object*,
              accounting::GcAllocator<mirror::Object*> > large_objects_ GUARDED_BY(lock_);
  typedef SafeMap<mirror::Object*, MemMap*, std::less<mirror::Object*>,
                  accounting::GcAllocator<std::pair<const mirror::Object*, MemMap*> > > MemMaps;
  MemMaps mem_maps_ GUARDED_BY(lock_);
};

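// Comment-only sketch of how this space might be used; `self` and the byte count are hypothetical
// placeholders and error handling is elided:
//
//   LargeObjectMapSpace* los = LargeObjectMapSpace::Create("large object space");
//   size_t bytes_allocated = 0;
//   mirror::Object* obj = los->Alloc(self, 128 * KB, &bytes_allocated);
//   if (obj != NULL && los->Contains(obj)) {
//     size_t freed = los->Free(self, obj);  // Presumably matches bytes_allocated.
//   }
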
// A continuous large object space with a free-list to handle holes.
class FreeListSpace : public LargeObjectSpace {
 public:
  virtual ~FreeListSpace();
  static FreeListSpace* Create(const std::string& name, byte* requested_begin, size_t capacity);

  size_t AllocationSize(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(lock_);
  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
  size_t Free(Thread* self, mirror::Object* obj);
  bool Contains(const mirror::Object* obj) const;
  void Walk(DlMallocSpace::WalkCallback callback, void* arg) LOCKS_EXCLUDED(lock_);

  // Address at which the space begins.
  byte* Begin() const {
    return begin_;
  }

  // Address at which the space ends, which may vary as the space is filled.
  byte* End() const {
    return end_;
  }

  // Current size of the space.
  size_t Size() const {
    return End() - Begin();
  }

  void Dump(std::ostream& os) const;

 private:
  static const size_t kAlignment = kPageSize;

  class AllocationHeader {
   public:
    // Returns the allocation size, including the header.
    size_t AllocationSize() const {
      return alloc_size_;
    }

    // Updates the allocation size in the header; the allocation size includes the header itself.
    void SetAllocationSize(size_t size) {
      DCHECK(IsAligned<kPageSize>(size));
      alloc_size_ = size;
    }

    bool IsFree() const {
      return AllocationSize() == 0;
    }

    // Returns the previous free allocation header by using the prev_free_ member to figure out
    // where it is. If prev_free_ is 0, returns this header itself.
    AllocationHeader* GetPrevFreeAllocationHeader() {
      return reinterpret_cast<AllocationHeader*>(reinterpret_cast<uintptr_t>(this) - prev_free_);
    }

    // Returns the address of the object associated with this allocation header.
    mirror::Object* GetObjectAddress() {
      return reinterpret_cast<mirror::Object*>(reinterpret_cast<uintptr_t>(this) + sizeof(*this));
    }

    // Returns the next allocation header after the object associated with this allocation header.
    AllocationHeader* GetNextAllocationHeader() {
      DCHECK_NE(alloc_size_, 0U);
      return reinterpret_cast<AllocationHeader*>(reinterpret_cast<uintptr_t>(this) + alloc_size_);
    }

    // Returns how many free bytes there are before the block.
    size_t GetPrevFree() const {
      return prev_free_;
    }

    // Updates the size of the free block preceding the allocation.
    void SetPrevFree(size_t prev_free) {
      DCHECK(IsAligned<kPageSize>(prev_free));
      prev_free_ = prev_free;
    }

    // Finds and returns the next non-free allocation header after this one.
    // TODO: Optimize, currently O(n) for n following free pages.
    AllocationHeader* GetNextNonFree();

    // Used to implement best fit object allocation. Each allocation has an AllocationHeader which
    // contains the size of the previous free block preceding it. Implemented in such a way that we
    // can also find the iterator for any allocation header pointer.
    class SortByPrevFree {
     public:
      bool operator()(const AllocationHeader* a, const AllocationHeader* b) const {
        if (a->GetPrevFree() < b->GetPrevFree()) return true;
        if (a->GetPrevFree() > b->GetPrevFree()) return false;
        if (a->AllocationSize() < b->AllocationSize()) return true;
        if (a->AllocationSize() > b->AllocationSize()) return false;
        return reinterpret_cast<uintptr_t>(a) < reinterpret_cast<uintptr_t>(b);
      }
    };

   private:
    // Contains the size of the previous free block; if 0, the memory preceding us is an
    // allocation.
    size_t prev_free_;

    // Allocation size of this object; 0 means that this allocation header is free memory.
    size_t alloc_size_;

    friend class FreeListSpace;
  };

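  // Rough picture of the layout implied by the accessors above (an illustration inferred from
  // this header, not normative documentation): each allocated block is a page-aligned
  // AllocationHeader immediately followed by the object payload, optionally preceded by
  // prev_free_ bytes of free space.
  //
  //   | free gap (prev_free_ bytes) | AllocationHeader | object payload | AllocationHeader | ...
  //                                   ^ alloc_size_ covers the header plus the payload, so
  //                                     GetNextAllocationHeader() == this + alloc_size_.
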
  FreeListSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end);

  // Removes header from the free blocks set by finding the corresponding iterator and erasing it.
  void RemoveFreePrev(AllocationHeader* header) EXCLUSIVE_LOCKS_REQUIRED(lock_);

  // Finds the allocation header corresponding to obj.
  AllocationHeader* GetAllocationHeader(const mirror::Object* obj);

  typedef std::set<AllocationHeader*, AllocationHeader::SortByPrevFree,
                   accounting::GcAllocator<AllocationHeader*> > FreeBlocks;
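
  // Given the SortByPrevFree ordering, a best-fit probe over free_blocks_ can be expressed as a
  // lower_bound with a stack-allocated dummy header. This is only a sketch of the idea, not
  // necessarily the exact code in the .cc file:
  //
  //   AllocationHeader dummy;
  //   dummy.SetPrevFree(RoundUp(num_bytes + sizeof(AllocationHeader), kAlignment));
  //   dummy.SetAllocationSize(0);
  //   FreeBlocks::iterator it = free_blocks_.lower_bound(&dummy);
  //   // If it != free_blocks_.end(), *it is the header preceded by the smallest free gap that
  //   // still fits the request.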

  byte* const begin_;
  byte* const end_;

  UniquePtr<MemMap> mem_map_;
  Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  // There is no footer for allocations at the end of the space, so we manually keep track of how
  // much free space remains at the end.
  size_t free_end_ GUARDED_BY(lock_);
  FreeBlocks free_blocks_ GUARDED_BY(lock_);
};
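
// Comment-only sketch of creating and using a FreeListSpace; the name, base address, and sizes
// are hypothetical values, and `self` stands for the current Thread*:
//
//   FreeListSpace* space = FreeListSpace::Create("free list large object space",
//                                                /* requested_begin */ NULL, 128 * MB);
//   size_t bytes_allocated = 0;
//   mirror::Object* obj = space->Alloc(self, 64 * KB, &bytes_allocated);
//   // bytes_allocated reports the space actually consumed (aligned per kAlignment above).
//   space->Free(self, obj);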

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_LARGE_OBJECT_SPACE_H_