/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stdint.h>
#include <stddef.h>

#include "base/bit_utils.h"
#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemMap;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kNumArenaAllocKinds
};
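
// Illustrative note (a sketch, not itself part of this header's API): an
// ArenaAllocKind is passed as a tag at each allocation site, e.g.
//
//   allocator.Alloc(size, kArenaAllocGraph);
//
// so that per-kind byte totals can be reported when
// kArenaAllocatorCountAllocations above is set to true.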

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;

template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;

class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};

class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is called before the arena is reused; it zeroes the memory with memset for performance.
  void Reset();
  // Release is called in between uses; it returns pages to the OS with madvise to reduce
  // memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};

class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = Arena::kDefaultSize);
  virtual ~MallocArena();
};

class MemMapArena FINAL : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true,
                     bool low_4gb = false,
                     const char* name = "LinearAlloc");
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
  // if use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
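
// Illustrative sketch of direct pool use (in practice ArenaAllocator manages
// this chain itself):
//
//   ArenaPool pool;
//   Arena* arena = pool.AllocArena(Arena::kDefaultSize);
//   // ... carve allocations out of [arena->Begin(), arena->End()) ...
//   pool.FreeArenaChain(arena);  // Returns the whole chain to the pool.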

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
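//
// A minimal usage sketch (assumes a caller-owned ArenaPool):
//
//   ArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   int32_t* values = allocator.AllocArray<int32_t>(16, kArenaAllocMisc);
//   // `values` comes back zero-initialized; all memory is reclaimed when
//   // `allocator` goes out of scope.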
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h .
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
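  // Example (a sketch; ArenaVector is declared in arena_containers.h):
  //   ArenaVector<uint32_t> worklist(allocator.Adapter(kArenaAllocSTL));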

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      DCHECK(!IsRunningOnMemoryTool());  // Red zone prevents end == ptr_.
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  static constexpr size_t kAlignment = 8;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
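
// Illustrative sketch of dumping allocator statistics (per-kind counts are
// only collected when kArenaAllocatorCountAllocations is true):
//
//   std::ostringstream oss;
//   allocator.GetMemStats().Dump(oss);
//   LOG(INFO) << oss.str();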

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_