blob: 17045c62d375dfcbc2b7417c9ac61b180f7bf0f1 [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
18#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
20#include <stdint.h>
21#include <stddef.h>
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022
Vladimir Marko80afd022015-05-19 18:08:00 +010023#include "base/bit_utils.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "debug_stack.h"
25#include "macros.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080026#include "mutex.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070030class Arena;
31class ArenaPool;
32class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000033class ArenaStack;
34class ScopedArenaAllocator;
Vladimir Marko3481ba22015-04-13 12:22:36 +010035class MemMap;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000036class MemStats;
37
Vladimir Marko8081d2b2014-07-31 15:33:43 +010038template <typename T>
39class ArenaAllocatorAdapter;
40
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000041static constexpr bool kArenaAllocatorCountAllocations = false;
42
43// Type of allocation for memory tuning.
44enum ArenaAllocKind {
45 kArenaAllocMisc,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010046 kArenaAllocBBList,
47 kArenaAllocBBPredecessors,
48 kArenaAllocDfsPreOrder,
49 kArenaAllocDfsPostOrder,
50 kArenaAllocDomPostOrder,
51 kArenaAllocTopologicalSortOrder,
52 kArenaAllocLoweringInfo,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000053 kArenaAllocLIR,
Vladimir Marko8dea81c2014-06-06 14:50:36 +010054 kArenaAllocLIRResourceMask,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010055 kArenaAllocSwitchTable,
56 kArenaAllocFillArrayData,
57 kArenaAllocSlowPaths,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000058 kArenaAllocMIR,
59 kArenaAllocDFInfo,
60 kArenaAllocGrowableArray,
61 kArenaAllocGrowableBitMap,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010062 kArenaAllocSSAToDalvikMap,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000063 kArenaAllocDalvikToSSAMap,
64 kArenaAllocDebugInfo,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000065 kArenaAllocRegAlloc,
66 kArenaAllocData,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000067 kArenaAllocSTL,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010068 kArenaAllocGraphBuilder,
Vladimir Markof9f64412015-09-02 14:05:49 +010069 kArenaAllocGraph,
70 kArenaAllocBasicBlock,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010071 kArenaAllocBlockList,
72 kArenaAllocReversePostOrder,
73 kArenaAllocLinearOrder,
74 kArenaAllocConstantsMap,
Vladimir Marko60584552015-09-03 13:35:12 +000075 kArenaAllocPredecessors,
76 kArenaAllocSuccessors,
77 kArenaAllocDominated,
Vladimir Markof9f64412015-09-02 14:05:49 +010078 kArenaAllocInstruction,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010079 kArenaAllocInvokeInputs,
80 kArenaAllocPhiInputs,
Vladimir Markof9f64412015-09-02 14:05:49 +010081 kArenaAllocLoopInfo,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010082 kArenaAllocLoopInfoBackEdges,
Vladimir Markof9f64412015-09-02 14:05:49 +010083 kArenaAllocTryCatchInfo,
84 kArenaAllocUseListNode,
85 kArenaAllocEnvironment,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010086 kArenaAllocEnvironmentVRegs,
87 kArenaAllocEnvironmentLocations,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010088 kArenaAllocLocationSummary,
Vladimir Marko71bf8092015-09-15 15:33:14 +010089 kArenaAllocSsaBuilder,
Vladimir Markof9f64412015-09-02 14:05:49 +010090 kArenaAllocMoveOperands,
91 kArenaAllocCodeBuffer,
92 kArenaAllocStackMaps,
93 kArenaAllocBaselineMaps,
94 kArenaAllocOptimization,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010095 kArenaAllocGvn,
96 kArenaAllocSsaLiveness,
97 kArenaAllocSsaPhiElimination,
98 kArenaAllocReferenceTypePropagation,
99 kArenaAllocPrimitiveTypePropagation,
100 kArenaAllocSideEffectsAnalysis,
101 kArenaAllocRegisterAllocator,
Vladimir Marko225b6462015-09-28 12:17:40 +0100102 kArenaAllocStackMapStream,
103 kArenaAllocCodeGenerator,
104 kArenaAllocParallelMoveResolver,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000105 kNumArenaAllocKinds
106};
107
108template <bool kCount>
109class ArenaAllocatorStatsImpl;
110
111template <>
112class ArenaAllocatorStatsImpl<false> {
113 public:
114 ArenaAllocatorStatsImpl() = default;
115 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
116 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
117
118 void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700119 void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000120 size_t NumAllocations() const { return 0u; }
121 size_t BytesAllocated() const { return 0u; }
122 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
123 UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
124 }
125};
126
127template <bool kCount>
128class ArenaAllocatorStatsImpl {
129 public:
130 ArenaAllocatorStatsImpl();
131 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
132 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
133
134 void Copy(const ArenaAllocatorStatsImpl& other);
135 void RecordAlloc(size_t bytes, ArenaAllocKind kind);
136 size_t NumAllocations() const;
137 size_t BytesAllocated() const;
138 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
139
140 private:
141 size_t num_allocations_;
142 // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
143 size_t alloc_stats_[kNumArenaAllocKinds]; // Bytes used by various allocation kinds.
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000144
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100145 static const char* const kAllocNames[];
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000146};
147
148typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700149
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700150class Arena {
151 public:
152 static constexpr size_t kDefaultSize = 128 * KB;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700153 Arena();
154 virtual ~Arena() { }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700155 // Reset is for pre-use and uses memset for performance.
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700156 void Reset();
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700157 // Release is used inbetween uses and uses madvise for memory usage.
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700158 virtual void Release() { }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700159 uint8_t* Begin() {
160 return memory_;
buzbee862a7602013-04-05 10:58:54 -0700161 }
162
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700163 uint8_t* End() {
164 return memory_ + size_;
165 }
166
167 size_t Size() const {
168 return size_;
169 }
170
171 size_t RemainingSpace() const {
172 return Size() - bytes_allocated_;
173 }
174
Mathieu Chartier49285c52014-12-02 15:43:48 -0800175 size_t GetBytesAllocated() const {
176 return bytes_allocated_;
177 }
178
Mathieu Chartiere401d142015-04-22 13:56:20 -0700179 // Return true if ptr is contained in the arena.
180 bool Contains(const void* ptr) const {
181 return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
182 }
183
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700184 protected:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700185 size_t bytes_allocated_;
186 uint8_t* memory_;
187 size_t size_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700188 Arena* next_;
189 friend class ArenaPool;
190 friend class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000191 friend class ArenaStack;
192 friend class ScopedArenaAllocator;
193 template <bool kCount> friend class ArenaAllocatorStatsImpl;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700194
195 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700196 DISALLOW_COPY_AND_ASSIGN(Arena);
197};
198
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700199class MallocArena FINAL : public Arena {
200 public:
201 explicit MallocArena(size_t size = Arena::kDefaultSize);
202 virtual ~MallocArena();
203};
204
205class MemMapArena FINAL : public Arena {
206 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100207 MemMapArena(size_t size, bool low_4gb);
Vladimir Marko3481ba22015-04-13 12:22:36 +0100208 virtual ~MemMapArena();
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700209 void Release() OVERRIDE;
210
211 private:
212 std::unique_ptr<MemMap> map_;
213};
214
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700215class ArenaPool {
216 public:
Mathieu Chartierc7853442015-03-27 14:35:38 -0700217 explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700218 ~ArenaPool();
Mathieu Chartier90443472015-07-16 20:32:27 -0700219 Arena* AllocArena(size_t size) REQUIRES(!lock_);
220 void FreeArenaChain(Arena* first) REQUIRES(!lock_);
221 size_t GetBytesAllocated() const REQUIRES(!lock_);
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700222 // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
223 // use_malloc is false.
Mathieu Chartier90443472015-07-16 20:32:27 -0700224 void TrimMaps() REQUIRES(!lock_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700225
226 private:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700227 const bool use_malloc_;
Mathieu Chartier49285c52014-12-02 15:43:48 -0800228 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700229 Arena* free_arenas_ GUARDED_BY(lock_);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700230 const bool low_4gb_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700231 DISALLOW_COPY_AND_ASSIGN(ArenaPool);
232};
233
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100234class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700235 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700236 explicit ArenaAllocator(ArenaPool* pool);
237 ~ArenaAllocator();
238
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100239 // Get adapter for use in STL containers. See arena_containers.h .
240 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
241
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700242 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000243 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700244 if (UNLIKELY(is_running_on_memory_tool_)) {
Mathieu Chartier75165d02013-09-12 14:00:31 -0700245 return AllocValgrind(bytes, kind);
246 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800247 bytes = RoundUp(bytes, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700248 if (UNLIKELY(ptr_ + bytes > end_)) {
249 // Obtain a new block.
250 ObtainNewArenaForAllocation(bytes);
251 if (UNLIKELY(ptr_ == nullptr)) {
252 return nullptr;
253 }
254 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000255 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700256 uint8_t* ret = ptr_;
257 ptr_ += bytes;
258 return ret;
259 }
260
Mathieu Chartiere401d142015-04-22 13:56:20 -0700261 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
262 void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
263 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
264 DCHECK_GE(new_size, ptr_size);
265 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
266 auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
267 // If we haven't allocated anything else, we can safely extend.
268 if (end == ptr_) {
269 const size_t size_delta = new_size - ptr_size;
270 // Check remain space.
271 const size_t remain = end_ - ptr_;
272 if (remain >= size_delta) {
273 ptr_ += size_delta;
274 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
275 return ptr;
276 }
277 }
278 auto* new_ptr = Alloc(new_size, kind);
279 memcpy(new_ptr, ptr, ptr_size);
280 // TODO: Call free on ptr if linear alloc supports free.
281 return new_ptr;
282 }
283
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000284 template <typename T>
285 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
286 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100287 }
288
Mathieu Chartier75165d02013-09-12 14:00:31 -0700289 void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700290
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700291 void ObtainNewArenaForAllocation(size_t allocation_size);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700292
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700293 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700294
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000295 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700296
Mathieu Chartierc7853442015-03-27 14:35:38 -0700297 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
298 // TODO: Change BytesAllocated to this behavior?
299 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700300
Mathieu Chartiere401d142015-04-22 13:56:20 -0700301 ArenaPool* GetArenaPool() const {
302 return pool_;
303 }
304
305 bool Contains(const void* ptr) const;
306
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700307 private:
Mathieu Chartierb666f482015-02-18 14:33:14 -0800308 static constexpr size_t kAlignment = 8;
309
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700310 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700311
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700312 ArenaPool* pool_;
313 uint8_t* begin_;
314 uint8_t* end_;
315 uint8_t* ptr_;
316 Arena* arena_head_;
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700317 bool is_running_on_memory_tool_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700318
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100319 template <typename U>
320 friend class ArenaAllocatorAdapter;
321
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700322 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700323}; // ArenaAllocator
324
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000325class MemStats {
326 public:
327 MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
328 ssize_t lost_bytes_adjustment = 0);
329 void Dump(std::ostream& os) const;
330
331 private:
332 const char* const name_;
333 const ArenaAllocatorStats* const stats_;
334 const Arena* const first_arena_;
335 const ssize_t lost_bytes_adjustment_;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700336}; // MemStats
buzbee862a7602013-04-05 10:58:54 -0700337
338} // namespace art
339
Mathieu Chartierb666f482015-02-18 14:33:14 -0800340#endif // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_