/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
18#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
#include <stddef.h>
#include <stdint.h>
#include <string.h>  // For memcpy() in ArenaAllocator::Realloc().

#include "base/bit_utils.h"
#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"
buzbee862a7602013-04-05 10:58:54 -070028
29namespace art {
30
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070031class Arena;
32class ArenaPool;
33class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000034class ArenaStack;
35class ScopedArenaAllocator;
Vladimir Marko3481ba22015-04-13 12:22:36 +010036class MemMap;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000037class MemStats;
38
Vladimir Marko8081d2b2014-07-31 15:33:43 +010039template <typename T>
40class ArenaAllocatorAdapter;
41
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000042static constexpr bool kArenaAllocatorCountAllocations = false;
43
// Type of allocation for memory tuning.
// Each kind tags allocations from a particular compiler/runtime subsystem so
// that ArenaAllocatorStatsImpl can attribute bytes per kind when allocation
// counting (kArenaAllocatorCountAllocations) is enabled.
// NOTE: kNumArenaAllocKinds must remain the last enumerator; it is used to
// size the per-kind stats array.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocBBList,
  kArenaAllocBBPredecessors,
  kArenaAllocDfsPreOrder,
  kArenaAllocDfsPostOrder,
  kArenaAllocDomPostOrder,
  kArenaAllocTopologicalSortOrder,
  kArenaAllocLoweringInfo,
  kArenaAllocLIR,
  kArenaAllocLIRResourceMask,
  kArenaAllocSwitchTable,
  kArenaAllocFillArrayData,
  kArenaAllocSlowPaths,
  kArenaAllocMIR,
  kArenaAllocDFInfo,
  kArenaAllocGrowableArray,
  kArenaAllocGrowableBitMap,
  kArenaAllocSSAToDalvikMap,
  kArenaAllocDalvikToSSAMap,
  kArenaAllocDebugInfo,
  kArenaAllocRegAlloc,
  kArenaAllocData,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocBaselineMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocPrimitiveTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocStackMapStream,
  kArenaAllocCodeGenerator,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocLSE,
  kArenaAllocVerifier,
  kNumArenaAllocKinds
};
113
template <bool kCount>
class ArenaAllocatorStatsImpl;

// No-op specialization used when allocation counting is disabled
// (kArenaAllocatorCountAllocations == false). All operations are empty
// inline functions so stats tracking compiles away to nothing.
template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};
132
// Counting implementation of the stats interface, instantiated only for
// kCount == true (the <false> case is handled by the specialization above).
// Tracks the total number of allocations and bytes allocated per
// ArenaAllocKind.
template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  // Overwrite these stats with a copy of `other`'s.
  void Copy(const ArenaAllocatorStatsImpl& other);
  // Record one allocation of `bytes` attributed to `kind`.
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  // Dump per-kind totals to `os`; `lost_bytes_adjustment` corrects for bytes
  // accounted elsewhere (see MemStats).
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];  // Printable name per ArenaAllocKind.
};

// Alias selecting the counting or no-op implementation at compile time.
typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700155
Vladimir Marko2a408a32015-09-18 14:11:00 +0100156template <bool kAvailable, bool kValgrind>
157class ArenaAllocatorMemoryToolCheckImpl {
158 // This is the generic template but since there is a partial specialization
159 // for kValgrind == false, this can be instantiated only for kValgrind == true.
160 static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
161 static_assert(kAvailable, "Valgrind implies memory tool availability.");
162
163 public:
164 ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
165 bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
166
167 private:
168 const bool is_running_on_valgrind_;
169};
170
// Non-Valgrind case: availability is known at compile time, so no runtime
// probe or cached flag is needed.
template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

// Alias binding the check to the memory tool configured at build time.
typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;
180
// Mixin that forwards memory-tool annotations (define/undefine/poison a
// region) only when a tool is actually running. The inline wrappers keep the
// common no-tool case to a single predicted-not-taken branch; the Do* slow
// paths live out of line.
class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  // Mark [ptr, ptr + size) as allocated and initialized.
  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  // Mark [ptr, ptr + size) as allocated but uninitialized.
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  // Mark [ptr, ptr + size) as unallocated; any access is reported by the tool.
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
206
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700207class Arena {
208 public:
209 static constexpr size_t kDefaultSize = 128 * KB;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700210 Arena();
211 virtual ~Arena() { }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700212 // Reset is for pre-use and uses memset for performance.
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700213 void Reset();
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700214 // Release is used inbetween uses and uses madvise for memory usage.
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700215 virtual void Release() { }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700216 uint8_t* Begin() {
217 return memory_;
buzbee862a7602013-04-05 10:58:54 -0700218 }
219
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700220 uint8_t* End() {
221 return memory_ + size_;
222 }
223
224 size_t Size() const {
225 return size_;
226 }
227
228 size_t RemainingSpace() const {
229 return Size() - bytes_allocated_;
230 }
231
Mathieu Chartier49285c52014-12-02 15:43:48 -0800232 size_t GetBytesAllocated() const {
233 return bytes_allocated_;
234 }
235
Mathieu Chartiere401d142015-04-22 13:56:20 -0700236 // Return true if ptr is contained in the arena.
237 bool Contains(const void* ptr) const {
238 return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
239 }
240
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700241 protected:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700242 size_t bytes_allocated_;
243 uint8_t* memory_;
244 size_t size_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700245 Arena* next_;
246 friend class ArenaPool;
247 friend class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000248 friend class ArenaStack;
249 friend class ScopedArenaAllocator;
250 template <bool kCount> friend class ArenaAllocatorStatsImpl;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700251
252 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700253 DISALLOW_COPY_AND_ASSIGN(Arena);
254};
255
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700256class MallocArena FINAL : public Arena {
257 public:
258 explicit MallocArena(size_t size = Arena::kDefaultSize);
259 virtual ~MallocArena();
260};
261
// Arena backed by a MemMap, which allows returning memory to the OS via
// madvise in Release() (unlike MallocArena).
class MemMapArena FINAL : public Arena {
 public:
  // `low_4gb` requests a mapping below 4GB — presumably for 32-bit-compatible
  // pointers; confirm against MemMap.
  MemMapArena(size_t size, bool low_4gb);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;  // Owns the underlying mapping.
};
271
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700272class ArenaPool {
273 public:
Mathieu Chartierc7853442015-03-27 14:35:38 -0700274 explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700275 ~ArenaPool();
Mathieu Chartier90443472015-07-16 20:32:27 -0700276 Arena* AllocArena(size_t size) REQUIRES(!lock_);
277 void FreeArenaChain(Arena* first) REQUIRES(!lock_);
278 size_t GetBytesAllocated() const REQUIRES(!lock_);
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700279 // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
280 // use_malloc is false.
Mathieu Chartier90443472015-07-16 20:32:27 -0700281 void TrimMaps() REQUIRES(!lock_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700282
283 private:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700284 const bool use_malloc_;
Mathieu Chartier49285c52014-12-02 15:43:48 -0800285 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700286 Arena* free_arenas_ GUARDED_BY(lock_);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700287 const bool low_4gb_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700288 DISALLOW_COPY_AND_ASSIGN(ArenaPool);
289};
290
Vladimir Marko2a408a32015-09-18 14:11:00 +0100291class ArenaAllocator
292 : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700293 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700294 explicit ArenaAllocator(ArenaPool* pool);
295 ~ArenaAllocator();
296
Vladimir Marko2a408a32015-09-18 14:11:00 +0100297 using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
298 using ArenaAllocatorMemoryTool::MakeDefined;
299 using ArenaAllocatorMemoryTool::MakeUndefined;
300 using ArenaAllocatorMemoryTool::MakeInaccessible;
301
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100302 // Get adapter for use in STL containers. See arena_containers.h .
303 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
304
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700305 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000306 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100307 if (UNLIKELY(IsRunningOnMemoryTool())) {
308 return AllocWithMemoryTool(bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700309 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800310 bytes = RoundUp(bytes, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700311 if (UNLIKELY(ptr_ + bytes > end_)) {
312 // Obtain a new block.
313 ObtainNewArenaForAllocation(bytes);
314 if (UNLIKELY(ptr_ == nullptr)) {
315 return nullptr;
316 }
317 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000318 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700319 uint8_t* ret = ptr_;
320 ptr_ += bytes;
321 return ret;
322 }
323
Mathieu Chartiere401d142015-04-22 13:56:20 -0700324 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
325 void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
326 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
327 DCHECK_GE(new_size, ptr_size);
328 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
329 auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
330 // If we haven't allocated anything else, we can safely extend.
331 if (end == ptr_) {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100332 DCHECK(!IsRunningOnMemoryTool()); // Red zone prevents end == ptr_.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700333 const size_t size_delta = new_size - ptr_size;
334 // Check remain space.
335 const size_t remain = end_ - ptr_;
336 if (remain >= size_delta) {
337 ptr_ += size_delta;
338 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
339 return ptr;
340 }
341 }
342 auto* new_ptr = Alloc(new_size, kind);
343 memcpy(new_ptr, ptr, ptr_size);
344 // TODO: Call free on ptr if linear alloc supports free.
345 return new_ptr;
346 }
347
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000348 template <typename T>
349 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
350 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100351 }
352
Vladimir Marko2a408a32015-09-18 14:11:00 +0100353 void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700354
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700355 void ObtainNewArenaForAllocation(size_t allocation_size);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700356
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700357 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700358
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000359 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700360
Mathieu Chartierc7853442015-03-27 14:35:38 -0700361 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
362 // TODO: Change BytesAllocated to this behavior?
363 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700364
Mathieu Chartiere401d142015-04-22 13:56:20 -0700365 ArenaPool* GetArenaPool() const {
366 return pool_;
367 }
368
369 bool Contains(const void* ptr) const;
370
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700371 private:
Mathieu Chartierb666f482015-02-18 14:33:14 -0800372 static constexpr size_t kAlignment = 8;
373
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700374 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700375
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700376 ArenaPool* pool_;
377 uint8_t* begin_;
378 uint8_t* end_;
379 uint8_t* ptr_;
380 Arena* arena_head_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700381
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100382 template <typename U>
383 friend class ArenaAllocatorAdapter;
384
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700385 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700386}; // ArenaAllocator
387
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000388class MemStats {
389 public:
390 MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
391 ssize_t lost_bytes_adjustment = 0);
392 void Dump(std::ostream& os) const;
393
394 private:
395 const char* const name_;
396 const ArenaAllocatorStats* const stats_;
397 const Arena* const first_arena_;
398 const ssize_t lost_bytes_adjustment_;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700399}; // MemStats
buzbee862a7602013-04-05 10:58:54 -0700400
401} // namespace art
402
Mathieu Chartierb666f482015-02-18 14:33:14 -0800403#endif // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_