/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stdint.h>
#include <stddef.h>
#include <string.h>     // For memcpy() used by Realloc().
#include <sys/types.h>  // For ssize_t used in the Dump() declarations.

#include <iosfwd>       // For the std::ostream& parameters of the Dump() methods.
#include <memory>       // For std::unique_ptr<MemMap> in MemMapArena.

#include "base/bit_utils.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemMap;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

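// When true, ArenaAllocator counts allocations per ArenaAllocKind via
// ArenaAllocatorStatsImpl<true>; the totals can then be dumped through MemStats.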
static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocBB,
  kArenaAllocBBList,
  kArenaAllocBBPredecessors,
  kArenaAllocDfsPreOrder,
  kArenaAllocDfsPostOrder,
  kArenaAllocDomPostOrder,
  kArenaAllocTopologicalSortOrder,
  kArenaAllocLoweringInfo,
  kArenaAllocLIR,
  kArenaAllocLIRResourceMask,
  kArenaAllocSwitchTable,
  kArenaAllocFillArrayData,
  kArenaAllocSlowPaths,
  kArenaAllocMIR,
  kArenaAllocDFInfo,
  kArenaAllocGrowableArray,
  kArenaAllocGrowableBitMap,
  kArenaAllocSSAToDalvikMap,
  kArenaAllocDalvikToSSAMap,
  kArenaAllocDebugInfo,
  kArenaAllocSuccessor,
  kArenaAllocRegAlloc,
  kArenaAllocData,
  kArenaAllocPredecessors,
  kArenaAllocSTL,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
  void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
    UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
  }
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is called before the arena is reused; it zeroes the memory with memset for performance.
  void Reset();
  // Release is called between uses; it madvises the memory away to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = Arena::kDefaultSize);
  virtual ~MallocArena();
};

class MemMapArena FINAL : public Arena {
 public:
  explicit MemMapArena(size_t size, bool low_4gb);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
  ~ArenaPool();
  Arena* AllocArena(size_t size) LOCKS_EXCLUDED(lock_);
  void FreeArenaChain(Arena* first) LOCKS_EXCLUDED(lock_);
  size_t GetBytesAllocated() const LOCKS_EXCLUDED(lock_);
  // Trim the maps in arenas by madvising; used by the JIT to reduce memory usage. This only
  // works when use_malloc is false.
  void TrimMaps() LOCKS_EXCLUDED(lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
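
// A minimal sketch of driving the pool directly (ArenaAllocator normally does this
// internally; the argument values below are illustrative only):
//
//   ArenaPool pool(/* use_malloc= */ false, /* low_4gb= */ false);
//   Arena* arena = pool.AllocArena(Arena::kDefaultSize);
//   pool.FreeArenaChain(arena);  // Returns the whole chain to the free list.
//   pool.TrimMaps();             // Madvises unused pages; only effective when use_malloc is false.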

class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
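  // A minimal usage sketch (assumes the ArenaVector alias from arena_containers.h):
  //   ArenaVector<uint32_t> worklist(allocator.Adapter(kArenaAllocSTL));
  //   worklist.push_back(42u);  // The vector's backing storage comes from the arena.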

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(running_on_valgrind_)) {
      return AllocValgrind(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    if (UNLIKELY(ptr_ + bytes > end_)) {
      // Obtain a new block.
      ObtainNewArenaForAllocation(bytes);
      if (UNLIKELY(ptr_ == nullptr)) {
        return nullptr;
      }
    }
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    uint8_t* ret = ptr_;
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      const size_t size_delta = new_size - ptr_size;
      // Check the remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
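  // For example (sketch): when nothing else has been allocated in between, the most
  // recent block is grown in place and no memcpy occurs.
  //   void* buf = allocator.Alloc(16u);
  //   buf = allocator.Realloc(buf, 16u, 32u);  // Fast path: extends the same block.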

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }
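  // For example, a zero-initialized scratch table (sketch):
  //   uint32_t* table = allocator.AllocArray<uint32_t>(64u, kArenaAllocRegAlloc);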

  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);

  void ObtainNewArenaForAllocation(size_t allocation_size);

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up the bytes allocated from all arenas in the arena_head_ chain.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

 private:
  static constexpr size_t kAlignment = 8;

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;
  bool running_on_valgrind_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator
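
// A minimal end-to-end sketch, assuming the pool outlives the allocator (the sizes
// and kinds below are illustrative only):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* misc = allocator.Alloc(128u, kArenaAllocMisc);
//     uint16_t* regs = allocator.AllocArray<uint16_t>(32u, kArenaAllocRegAlloc);
//   }  // The allocator's destructor returns its arena chain to the pool for reuse.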

class MemStats {
 public:
  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_