/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_DLMALLOC_SPACE_H_
#define ART_RUNTIME_GC_SPACE_DLMALLOC_SPACE_H_

#include "gc/allocator/dlmalloc.h"
#include "malloc_space.h"
#include "space.h"

namespace art {
namespace gc {

namespace collector {
  class MarkSweep;
}  // namespace collector

namespace space {

// An alloc space is a space where objects may be allocated and garbage collected.
class DlMallocSpace : public MallocSpace {
 public:
  // Create a DlMallocSpace from an existing mem_map.
  static DlMallocSpace* CreateFromMemMap(MemMap* mem_map, const std::string& name,
                                         size_t starting_size, size_t initial_size,
                                         size_t growth_limit, size_t capacity);

  // Create a DlMallocSpace with the requested sizes. The requested base address is not
  // guaranteed to be granted; if it is required, the caller should call Begin on the returned
  // space to confirm that the request was granted.
  static DlMallocSpace* Create(const std::string& name, size_t initial_size, size_t growth_limit,
                               size_t capacity, byte* requested_begin);
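
  // Illustrative usage (a minimal sketch, not taken from this file) of confirming that a
  // requested base address was honored:
  //   DlMallocSpace* space = DlMallocSpace::Create("alloc space", initial_size, growth_limit,
  //                                                capacity, requested_begin);
  //   CHECK(space != nullptr);
  //   if (requested_begin != nullptr) {
  //     CHECK_EQ(space->Begin(), requested_begin);
  //   }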

  virtual mirror::Object* AllocWithGrowth(Thread* self, size_t num_bytes,
                                          size_t* bytes_allocated) LOCKS_EXCLUDED(lock_);
  virtual mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated);
  virtual size_t AllocationSize(mirror::Object* obj);
  virtual size_t Free(Thread* self, mirror::Object* ptr)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual size_t FreeList(Thread* self, size_t num_ptrs, mirror::Object** ptrs)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::Object* AllocNonvirtual(Thread* self, size_t num_bytes, size_t* bytes_allocated);
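
  // Reports the size of an allocated object without a virtual call: dlmalloc's usable size for
  // the chunk plus kChunkOverhead (the boundary tag), so it may exceed the number of bytes that
  // were originally requested.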
  size_t AllocationSizeNonvirtual(mirror::Object* obj) {
    void* obj_ptr = const_cast<void*>(reinterpret_cast<const void*>(obj));
    return mspace_usable_size(obj_ptr) + kChunkOverhead;
  }

#ifndef NDEBUG
  // Override only in the debug build.
  void CheckMoreCoreForPrecondition();
#endif

  void* GetMspace() const {
    return mspace_;
  }
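
  // Hands memory that the mspace no longer needs back to the system where possible; see the
  // definition in dlmalloc_space.cc for the exact reclamation strategy.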
  size_t Trim();

  // Perform an mspace_inspect_all which calls back for each allocation chunk. The chunk may not
  // be in use, indicated by num_bytes equaling zero.
  void Walk(WalkCallback callback, void* arg) LOCKS_EXCLUDED(lock_);
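
  // Illustrative callback (a minimal sketch, assuming WalkCallback follows dlmalloc's
  // mspace_inspect_all handler shape of (void* start, void* end, size_t used_bytes, void* arg)):
  //   static void CountUsedBytes(void* /*start*/, void* /*end*/, size_t used_bytes, void* arg) {
  //     *reinterpret_cast<size_t*>(arg) += used_bytes;  // used_bytes == 0 for unused chunks.
  //   }
  //   size_t used = 0;
  //   dlmalloc_space->Walk(CountUsedBytes, &used);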

  // Returns the number of bytes that the space has currently obtained from the system. This is
  // greater than or equal to the amount of live data in the space.
  size_t GetFootprint();

  // Returns the number of bytes that the heap is allowed to obtain from the system via MoreCore.
  size_t GetFootprintLimit();

  // Set the maximum number of bytes that the heap is allowed to obtain from the system via
  // MoreCore. Note that this is used to stop the mspace from growing beyond the limit towards
  // Capacity. When allocations fail we GC before increasing the footprint limit and allowing the
  // mspace to grow.
  void SetFootprintLimit(size_t limit);
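
  // Illustrative sketch of the policy described above (the caller-side names here are
  // placeholders, not taken from this file):
  //   if (allocation_failed) {
  //     RunGarbageCollection();
  //     size_t new_limit = std::min(GetFootprintLimit() + growth_step, Capacity());
  //     SetFootprintLimit(new_limit);
  //   }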

  MallocSpace* CreateInstance(const std::string& name, MemMap* mem_map, void* allocator,
                              byte* begin, byte* end, byte* limit, size_t growth_limit);

  uint64_t GetBytesAllocated();
  uint64_t GetObjectsAllocated();

  // Returns the class of a recently freed object.
  mirror::Class* FindRecentFreedObject(const mirror::Object* obj);

  virtual void Clear();

  virtual bool IsDlMallocSpace() const {
    return true;
  }
  virtual DlMallocSpace* AsDlMallocSpace() {
    return this;
  }

 protected:
  DlMallocSpace(const std::string& name, MemMap* mem_map, void* mspace, byte* begin, byte* end,
                byte* limit, size_t growth_limit);

 private:
  mirror::Object* AllocWithoutGrowthLocked(Thread* self, size_t num_bytes, size_t* bytes_allocated)
      EXCLUSIVE_LOCKS_REQUIRED(lock_);

  void* CreateAllocator(void* base, size_t morecore_start, size_t initial_size,
                        bool /*low_memory_mode*/) {
    return CreateMspace(base, morecore_start, initial_size);
  }
  static void* CreateMspace(void* base, size_t morecore_start, size_t initial_size);

  // The boundary tag overhead.
  static const size_t kChunkOverhead = kWordSize;

  // Underlying malloc space.
  void* const mspace_;

  // An mspace pointer used for allocation. Equal to what mspace_ points to, or nullptr after
  // InvalidateAllocator() is called.
  void* mspace_for_alloc_;

  friend class collector::MarkSweep;

  DISALLOW_COPY_AND_ASSIGN(DlMallocSpace);
};

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_DLMALLOC_SPACE_H_