/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "systrace.h"

namespace art {

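// A minimal usage sketch (illustrative only; assumes the Alloc() entry point
// and the kArenaAllocMisc kind declared in arena_allocator.h):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "example pool");
//   ArenaAllocator allocator(&pool);
//   void* storage = allocator.Alloc(64u, kArenaAllocMisc);
//   // All arenas go back to the pool when `allocator` is destroyed.

// Size of the red zone appended to each allocation when running under a
// memory tool (Valgrind / AddressSanitizer).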
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

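// Entries must stay in sync with the ArenaAllocKind enumeration in
// arena_allocator.h; the static_assert in Dump() checks the count.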
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

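// Prints three totals: "used" is the sum of recorded allocation sizes,
// "allocated" is the backing size of all arenas in the chain, and "lost" is
// arena space that was never handed out.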
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
#pragma GCC diagnostic pop

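// Out-of-line wrappers around the memory tool macros, presumably kept here so
// that users of the header do not need the macro definitions.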
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  CHECK(memory_ != nullptr);  // Abort on OOM.
  DCHECK_ALIGNED(memory_, ArenaAllocator::kAlignment);
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

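// Zeroes the used range so that a recycled arena hands out zero-initialized
// memory again; contrast with MemMapArena::Release() above, which returns the
// pages to the kernel instead.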
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

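// Returns the first free arena if it is large enough for `size`; otherwise
// creates a fresh MallocArena or MemMapArena. Either way the arena is zeroed
// via Reset() before it is handed out.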
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

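// Prepends the whole chain to the free list. Under a memory tool the contents
// are first marked undefined so that stale reads through old pointers are
// reported.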
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

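// The active arena's bytes_allocated_ is only refreshed by
// UpdateBytesAllocated(), so its current usage is derived from the live
// ptr_/begin_ pointers instead.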
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

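// Memory-tool allocation path. Each request is padded with a red zone and
// rounded up to 8 bytes, e.g. a 13-byte request reserves
// RoundUp(13 + kMemoryToolRedZoneBytes, 8) = 24 bytes, of which only the first
// 13 are marked defined; the rest stays inaccessible as the red zone.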
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArena(rounded_bytes);
    uint8_t* noaccess_begin = ret + bytes;
    uint8_t* noaccess_end;
    if (ret == arena_head_->Begin()) {
      DCHECK(ptr_ - rounded_bytes == ret);
      noaccess_end = end_;
    } else {
      // We're still using the old arena but `ret` comes from a new one just after it.
      DCHECK(arena_head_->next_ != nullptr);
      DCHECK(ret == arena_head_->next_->Begin());
      DCHECK_EQ(rounded_bytes, arena_head_->next_->GetBytesAllocated());
      noaccess_end = arena_head_->next_->End();
    }
    MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

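// Allocates `bytes` at the start of a fresh arena from the pool. If the
// current arena still has more free space than the new one would have left,
// the new arena is linked in behind the head and the current arena stays
// active; otherwise the new arena becomes the new head.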
uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

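// Returns true if `ptr` lies within the active allocation range or any arena
// owned by this allocator.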
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

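// lost_bytes_adjustment corrects for the head arena: its RemainingSpace() is
// based on a possibly stale bytes_allocated_, while (end_ - ptr_) below is the
// actual remaining space.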
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art