/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include <iosfwd>
#include <memory>

#include "base/bit_utils.h"
#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"
29namespace art {
30
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070031class Arena;
32class ArenaPool;
33class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000034class ArenaStack;
35class ScopedArenaAllocator;
Vladimir Marko3481ba22015-04-13 12:22:36 +010036class MemMap;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000037class MemStats;
38
Vladimir Marko8081d2b2014-07-31 15:33:43 +010039template <typename T>
40class ArenaAllocatorAdapter;
41
// Compile-time switch: when true, arena allocators tally every allocation by
// kind (see ArenaAllocatorStatsImpl); kept off by default to avoid overhead.
static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning. The order and set of enumerators are
// load-bearing: they index alloc_stats_ / kAllocNames, so do not reorder.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocBBList,
  kArenaAllocBBPredecessors,
  kArenaAllocDfsPreOrder,
  kArenaAllocDfsPostOrder,
  kArenaAllocDomPostOrder,
  kArenaAllocTopologicalSortOrder,
  kArenaAllocLoweringInfo,
  kArenaAllocLIR,
  kArenaAllocLIRResourceMask,
  kArenaAllocSwitchTable,
  kArenaAllocFillArrayData,
  kArenaAllocSlowPaths,
  kArenaAllocMIR,
  kArenaAllocDFInfo,
  kArenaAllocGrowableArray,
  kArenaAllocGrowableBitMap,
  kArenaAllocSSAToDalvikMap,
  kArenaAllocDalvikToSSAMap,
  kArenaAllocDebugInfo,
  kArenaAllocRegAlloc,
  kArenaAllocData,
  kArenaAllocSTL,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocBaselineMaps,
  kArenaAllocOptimization,
  kNumArenaAllocKinds  // Count of the kinds above; keep last.
};
96
97template <bool kCount>
98class ArenaAllocatorStatsImpl;
99
100template <>
101class ArenaAllocatorStatsImpl<false> {
102 public:
103 ArenaAllocatorStatsImpl() = default;
104 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
105 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
106
107 void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700108 void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000109 size_t NumAllocations() const { return 0u; }
110 size_t BytesAllocated() const { return 0u; }
111 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
112 UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
113 }
114};
115
116template <bool kCount>
117class ArenaAllocatorStatsImpl {
118 public:
119 ArenaAllocatorStatsImpl();
120 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
121 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
122
123 void Copy(const ArenaAllocatorStatsImpl& other);
124 void RecordAlloc(size_t bytes, ArenaAllocKind kind);
125 size_t NumAllocations() const;
126 size_t BytesAllocated() const;
127 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
128
129 private:
130 size_t num_allocations_;
131 // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
132 size_t alloc_stats_[kNumArenaAllocKinds]; // Bytes used by various allocation kinds.
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000133
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100134 static const char* const kAllocNames[];
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000135};
136
137typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700138
Vladimir Marko2a408a32015-09-18 14:11:00 +0100139template <bool kAvailable, bool kValgrind>
140class ArenaAllocatorMemoryToolCheckImpl {
141 // This is the generic template but since there is a partial specialization
142 // for kValgrind == false, this can be instantiated only for kValgrind == true.
143 static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
144 static_assert(kAvailable, "Valgrind implies memory tool availability.");
145
146 public:
147 ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
148 bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
149
150 private:
151 const bool is_running_on_valgrind_;
152};
153
154template <bool kAvailable>
155class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
156 public:
157 ArenaAllocatorMemoryToolCheckImpl() { }
158 bool IsRunningOnMemoryTool() { return kAvailable; }
159};
160
161typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
162 ArenaAllocatorMemoryToolCheck;
163
164class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
165 public:
166 using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;
167
168 void MakeDefined(void* ptr, size_t size) {
169 if (IsRunningOnMemoryTool()) {
170 MEMORY_TOOL_MAKE_DEFINED(ptr, size);
171 }
172 }
173 void MakeUndefined(void* ptr, size_t size) {
174 if (IsRunningOnMemoryTool()) {
175 MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
176 }
177 }
178 void MakeInaccessible(void* ptr, size_t size) {
179 if (IsRunningOnMemoryTool()) {
180 MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
181 }
182 }
183};
184
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700185class Arena {
186 public:
187 static constexpr size_t kDefaultSize = 128 * KB;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700188 Arena();
189 virtual ~Arena() { }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700190 // Reset is for pre-use and uses memset for performance.
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700191 void Reset();
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700192 // Release is used inbetween uses and uses madvise for memory usage.
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700193 virtual void Release() { }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700194 uint8_t* Begin() {
195 return memory_;
buzbee862a7602013-04-05 10:58:54 -0700196 }
197
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700198 uint8_t* End() {
199 return memory_ + size_;
200 }
201
202 size_t Size() const {
203 return size_;
204 }
205
206 size_t RemainingSpace() const {
207 return Size() - bytes_allocated_;
208 }
209
Mathieu Chartier49285c52014-12-02 15:43:48 -0800210 size_t GetBytesAllocated() const {
211 return bytes_allocated_;
212 }
213
Mathieu Chartiere401d142015-04-22 13:56:20 -0700214 // Return true if ptr is contained in the arena.
215 bool Contains(const void* ptr) const {
216 return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
217 }
218
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700219 protected:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700220 size_t bytes_allocated_;
221 uint8_t* memory_;
222 size_t size_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700223 Arena* next_;
224 friend class ArenaPool;
225 friend class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000226 friend class ArenaStack;
227 friend class ScopedArenaAllocator;
228 template <bool kCount> friend class ArenaAllocatorStatsImpl;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700229
230 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700231 DISALLOW_COPY_AND_ASSIGN(Arena);
232};
233
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700234class MallocArena FINAL : public Arena {
235 public:
236 explicit MallocArena(size_t size = Arena::kDefaultSize);
237 virtual ~MallocArena();
238};
239
240class MemMapArena FINAL : public Arena {
241 public:
Roland Levillain3887c462015-08-12 18:15:42 +0100242 MemMapArena(size_t size, bool low_4gb);
Vladimir Marko3481ba22015-04-13 12:22:36 +0100243 virtual ~MemMapArena();
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700244 void Release() OVERRIDE;
245
246 private:
247 std::unique_ptr<MemMap> map_;
248};
249
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700250class ArenaPool {
251 public:
Mathieu Chartierc7853442015-03-27 14:35:38 -0700252 explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700253 ~ArenaPool();
Mathieu Chartier90443472015-07-16 20:32:27 -0700254 Arena* AllocArena(size_t size) REQUIRES(!lock_);
255 void FreeArenaChain(Arena* first) REQUIRES(!lock_);
256 size_t GetBytesAllocated() const REQUIRES(!lock_);
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700257 // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
258 // use_malloc is false.
Mathieu Chartier90443472015-07-16 20:32:27 -0700259 void TrimMaps() REQUIRES(!lock_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700260
261 private:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700262 const bool use_malloc_;
Mathieu Chartier49285c52014-12-02 15:43:48 -0800263 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700264 Arena* free_arenas_ GUARDED_BY(lock_);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700265 const bool low_4gb_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700266 DISALLOW_COPY_AND_ASSIGN(ArenaPool);
267};
268
Vladimir Marko2a408a32015-09-18 14:11:00 +0100269class ArenaAllocator
270 : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700271 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700272 explicit ArenaAllocator(ArenaPool* pool);
273 ~ArenaAllocator();
274
Vladimir Marko2a408a32015-09-18 14:11:00 +0100275 using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
276 using ArenaAllocatorMemoryTool::MakeDefined;
277 using ArenaAllocatorMemoryTool::MakeUndefined;
278 using ArenaAllocatorMemoryTool::MakeInaccessible;
279
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100280 // Get adapter for use in STL containers. See arena_containers.h .
281 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
282
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700283 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000284 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100285 if (UNLIKELY(IsRunningOnMemoryTool())) {
286 return AllocWithMemoryTool(bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700287 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800288 bytes = RoundUp(bytes, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700289 if (UNLIKELY(ptr_ + bytes > end_)) {
290 // Obtain a new block.
291 ObtainNewArenaForAllocation(bytes);
292 if (UNLIKELY(ptr_ == nullptr)) {
293 return nullptr;
294 }
295 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000296 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700297 uint8_t* ret = ptr_;
298 ptr_ += bytes;
299 return ret;
300 }
301
Mathieu Chartiere401d142015-04-22 13:56:20 -0700302 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
303 void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
304 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
305 DCHECK_GE(new_size, ptr_size);
306 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
307 auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
308 // If we haven't allocated anything else, we can safely extend.
309 if (end == ptr_) {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100310 DCHECK(!IsRunningOnMemoryTool()); // Red zone prevents end == ptr_.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700311 const size_t size_delta = new_size - ptr_size;
312 // Check remain space.
313 const size_t remain = end_ - ptr_;
314 if (remain >= size_delta) {
315 ptr_ += size_delta;
316 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
317 return ptr;
318 }
319 }
320 auto* new_ptr = Alloc(new_size, kind);
321 memcpy(new_ptr, ptr, ptr_size);
322 // TODO: Call free on ptr if linear alloc supports free.
323 return new_ptr;
324 }
325
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000326 template <typename T>
327 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
328 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100329 }
330
Vladimir Marko2a408a32015-09-18 14:11:00 +0100331 void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700332
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700333 void ObtainNewArenaForAllocation(size_t allocation_size);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700334
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700335 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700336
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000337 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700338
Mathieu Chartierc7853442015-03-27 14:35:38 -0700339 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
340 // TODO: Change BytesAllocated to this behavior?
341 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700342
Mathieu Chartiere401d142015-04-22 13:56:20 -0700343 ArenaPool* GetArenaPool() const {
344 return pool_;
345 }
346
347 bool Contains(const void* ptr) const;
348
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700349 private:
Mathieu Chartierb666f482015-02-18 14:33:14 -0800350 static constexpr size_t kAlignment = 8;
351
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700352 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700353
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700354 ArenaPool* pool_;
355 uint8_t* begin_;
356 uint8_t* end_;
357 uint8_t* ptr_;
358 Arena* arena_head_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700359
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100360 template <typename U>
361 friend class ArenaAllocatorAdapter;
362
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700363 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700364}; // ArenaAllocator
365
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000366class MemStats {
367 public:
368 MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
369 ssize_t lost_bytes_adjustment = 0);
370 void Dump(std::ostream& os) const;
371
372 private:
373 const char* const name_;
374 const ArenaAllocatorStats* const stats_;
375 const Arena* const first_arena_;
376 const ssize_t lost_bytes_adjustment_;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700377}; // MemStats
buzbee862a7602013-04-05 10:58:54 -0700378
379} // namespace art
380
Mathieu Chartierb666f482015-02-18 14:33:14 -0800381#endif // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_