blob: 170468856503ea31915153bf01913e59d98af907 [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
buzbee862a7602013-04-05 10:58:54 -070026
27namespace art {
28
// Size of the red zone appended to each allocation when running under a
// memory tool (ASan/Valgrind); see AllocWithMemoryTool() below.
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-class definition of the static constexpr member (required for
// ODR-use before C++17).
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070031
// Human-readable label for each ArenaAllocKind, printed by Dump() next to the
// per-kind byte counts. Entries must stay in one-to-one correspondence (and in
// the same order) with the ArenaAllocKind enumerators; Dump() enforces the
// count with a static_assert against kNumArenaAllocKinds.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "RegAlloc ",
  "Data ",
  "STL ",
  "GraphBuilder ",
  "Graph ",
  "BasicBlock ",
  "BlockList ",
  "RevPostOrder ",
  "LinearOrder ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors ",
  "Dominated ",
  "Instruction ",
  "InvokeInputs ",
  "PhiInputs ",
  "LoopInfo ",
  "LIBackEdges ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "EnvVRegs ",
  "EnvLocations ",
  "LocSummary ",
  "SsaBuilder ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
  "GVN ",
  "InductionVar ",
  "BCE ",
  "SsaLiveness ",
  "SsaPhiElim ",
  "RefTypeProp ",
  "PrimTypeProp ",
  "SideEffects ",
  "RegAllocator ",
  "StackMapStm ",
  "CodeGen ",
  "ParallelMove ",
  "GraphChecker ",
};
98
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000099template <bool kCount>
100ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
101 : num_allocations_(0u) {
102 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
103}
104
105template <bool kCount>
106void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
107 num_allocations_ = other.num_allocations_;
108 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
109}
110
111template <bool kCount>
112void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
113 alloc_stats_[kind] += bytes;
114 ++num_allocations_;
115}
116
// Returns the number of allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
121
122template <bool kCount>
123size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
124 const size_t init = 0u; // Initial value of the correct type.
125 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
126}
127
// Writes aggregate and per-kind allocation statistics for the arena chain
// starting at |first| to |os|. |lost_bytes_adjustment| corrects for the head
// arena whose bytes_allocated_ may not have been updated yet (see
// ArenaAllocator::GetMemStats()).
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;  // Bytes held by arenas but never handed out.
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  // Keep the name table in sync with the ArenaAllocKind enumeration.
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
156
157// Explicitly instantiate the used implementation.
158template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
159
// Out-of-line helper: marks [ptr, ptr + size) as defined (readable) for the
// memory tool (ASan/Valgrind).
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}
163
// Out-of-line helper: marks [ptr, ptr + size) as allocated-but-uninitialized
// for the memory tool, so reads before writes are reported.
void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}
167
// Out-of-line helper: marks [ptr, ptr + size) as inaccessible for the memory
// tool, so any access is reported.
void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}
171
// Arenas start empty and unlinked; subclass constructors are responsible for
// setting up the backing memory_ and size_.
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
174
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700175MallocArena::MallocArena(size_t size) {
176 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
177 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700178}
179
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700180MallocArena::~MallocArena() {
181 free(reinterpret_cast<void*>(memory_));
182}
183
// Creates an arena backed by an anonymous, read-write memory mapping of
// |size| bytes. When |low_4gb| is set the mapping is requested in the low
// 4GiB of the address space. Aborts (CHECK) if the mapping fails.
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  // The map may be larger than requested (e.g. page rounding) -- use its
  // actual bounds. NOTE(review): assumes MemMap::Size() >= |size|; confirm.
  memory_ = map_->Begin();
  size_ = map_->Size();
}
192
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}
196
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700197void MemMapArena::Release() {
198 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700199 map_->MadviseDontNeedAndZero();
200 bytes_allocated_ = 0;
201 }
202}
203
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700204void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700205 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700206 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700207 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700208 }
buzbee862a7602013-04-05 10:58:54 -0700209}
210
// Creates an arena pool. With |use_malloc| arenas are calloc()-backed
// MallocArenas; otherwise they are mem-map backed MemMapArenas. |low_4gb|
// restricts mappings to the low 4GiB and therefore requires the mem-map
// backend.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // Make sure the MemMap subsystem is initialized before the first mapping.
    MemMap::Init();
  }
}
221
222ArenaPool::~ArenaPool() {
223 while (free_arenas_ != nullptr) {
224 auto* arena = free_arenas_;
225 free_arenas_ = free_arenas_->next_;
226 delete arena;
227 }
228}
229
// Returns a reset (zeroed) arena of at least |size| bytes, reusing a free
// arena when possible.
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    // Only the head of the free list is considered; if it is too small, a
    // fresh arena is allocated instead of searching further down the list.
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    // Allocation happens outside the lock; backend chosen at pool creation.
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  // Zero out any bytes handed out by a previous user of this arena.
  ret->Reset();
  return ret;
}
247
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700248void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700249 if (!use_malloc_) {
250 // Doesn't work for malloc.
251 MutexLock lock(Thread::Current(), lock_);
252 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
253 arena->Release();
254 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700255 }
256}
257
Mathieu Chartier49285c52014-12-02 15:43:48 -0800258size_t ArenaPool::GetBytesAllocated() const {
259 size_t total = 0;
260 MutexLock lock(Thread::Current(), lock_);
261 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
262 total += arena->GetBytesAllocated();
263 }
264 return total;
265}
266
// Returns a whole chain of arenas (linked via next_) to the pool's free list.
// Accepts nullptr (empty chain) as a no-op.
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Under ASan/Valgrind, mark the recycled bytes as undefined so that stale
    // reads through dangling pointers into these arenas are reported.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Find the tail of the chain outside the lock, then splice the entire
    // chain onto the front of free_arenas_ under the pool lock.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
284
// Returns the total bytes recorded by the allocation statistics (sum over all
// allocation kinds).
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
288
Mathieu Chartierc7853442015-03-27 14:35:38 -0700289size_t ArenaAllocator::BytesUsed() const {
290 size_t total = ptr_ - begin_;
291 if (arena_head_ != nullptr) {
292 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
293 cur_arena = cur_arena->next_) {
294 total += cur_arena->GetBytesAllocated();
295 }
296 }
297 return total;
298}
299
// Creates an allocator that draws arenas from |pool|. No arena is acquired
// until the first allocation.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}
307
// Flushes the bump-pointer usage of the current arena into its
// bytes_allocated_ field.
void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}
315
// Allocation path used when running under a memory tool (ASan/Valgrind).
// Appends a red zone to each allocation and poisons it so buffer overruns
// are detected.
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // Reserve space for the red zone and keep 8-byte alignment.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    // Fresh arena space starts out undefined for the memory tool.
    MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // The caller may use [ret, ret + bytes); the tail up to rounded_bytes is the
  // poisoned red zone.
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
335
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
341
// Fetches an arena from the pool large enough for |allocation_size| (at least
// Arena::kDefaultSize) and makes it the current (head) arena.
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  // Record usage of the arena being retired before moving the bump pointer.
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}
351
Mathieu Chartiere401d142015-04-22 13:56:20 -0700352bool ArenaAllocator::Contains(const void* ptr) const {
353 if (ptr >= begin_ && ptr < end_) {
354 return true;
355 }
356 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
357 if (cur_arena->Contains(ptr)) {
358 return true;
359 }
360 }
361 return false;
362}
363
// Snapshot handle for dumping allocator statistics. Holds non-owning
// pointers: |name|, |stats| and |first_arena| must outlive this object.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
371
// Writes the named allocator statistics to |os|.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
376
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ may be stale; the adjustment accounts
  // for bytes handed out from it since the last UpdateBytesAllocated().
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
383
384} // namespace art