blob: 7aa71f9c3ebac9688a7ff883181a4fedcc5dacb9 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
buzbee862a7602013-04-05 10:58:54 -070026
27namespace art {
28
// Size of the red zone placed after each allocation when running under a
// memory tool (ASan/Valgrind), used by AllocWithMemoryTool() to catch
// out-of-bounds writes.
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-class definition for the in-class-initialized constant (required
// pre-C++17 when the constant is odr-used).
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070031
// Human-readable label for each ArenaAllocKind, printed by Dump() next to the
// per-kind byte counters. The entry order must match the ArenaAllocKind enum;
// Dump() static_asserts that the array length equals kNumArenaAllocKinds.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "RegAlloc ",
  "Data ",
  "STL ",
  "GraphBuilder ",
  "Graph ",
  "BasicBlock ",
  "BlockList ",
  "RevPostOrder ",
  "LinearOrder ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors ",
  "Dominated ",
  "Instruction ",
  "InvokeInputs ",
  "PhiInputs ",
  "LoopInfo ",
  "LIBackEdges ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "EnvVRegs ",
  "EnvLocations ",
  "LocSummary ",
  "SsaBuilder ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
  "GVN ",
  "InductionVar ",
  "BCE ",
  "SsaLiveness ",
  "SsaPhiElim ",
  "RefTypeProp ",
  "PrimTypeProp ",
  "SideEffects ",
  "RegAllocator ",
  "StackMapStm ",
  "CodeGen ",
  "ParallelMove ",
};
97
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000098template <bool kCount>
99ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
100 : num_allocations_(0u) {
101 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
102}
103
104template <bool kCount>
105void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
106 num_allocations_ = other.num_allocations_;
107 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
108}
109
110template <bool kCount>
111void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
112 alloc_stats_[kind] += bytes;
113 ++num_allocations_;
114}
115
// Returns the total number of allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
120
121template <bool kCount>
122size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
123 const size_t init = 0u; // Initial value of the correct type.
124 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
125}
126
// Dumps a usage summary (used/allocated/lost bytes, arena count, average
// allocation size) followed by a per-kind byte breakdown. |first| is the head
// of the arena chain to walk; |lost_bytes_adjustment| compensates for the
// current arena whose bytes_allocated_ may be stale (see GetMemStats()).
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;  // Total bytes of backing storage across the chain.
  size_t lost_bytes = 0u;    // Backing bytes that were never handed out.
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  // Keep the name table and the enum in sync.
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
155
// Explicitly instantiate the used implementation; the template definitions
// above live in this .cc file, so this is the only translation unit that can
// emit them.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
158
// Out-of-line wrapper so callers need not see the memory tool macros:
// marks [ptr, ptr + size) as defined (readable/writable) for the tool.
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}
162
// Marks [ptr, ptr + size) as allocated-but-uninitialized for the memory tool.
void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}
166
// Marks [ptr, ptr + size) as off-limits; any access is reported by the tool.
void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}
170
// Arenas start empty and unlinked; subclasses are responsible for setting up
// memory_ and size_.
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
173
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700174MallocArena::MallocArena(size_t size) {
175 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
176 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700177}
178
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700179MallocArena::~MallocArena() {
180 free(reinterpret_cast<void*>(memory_));
181}
182
// Creates an arena backed by an anonymous memory mapping. |low_4gb| forces the
// mapping below 4 GiB.
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  // NOTE(review): the mapping is labeled "LinearAlloc" regardless of use; the
  // name only affects how the region is reported (e.g. /proc/self/maps) —
  // confirm against MemMap::MapAnonymous documentation.
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  // The map may be larger than requested (e.g. page rounding), so take the
  // actual begin/size from the map rather than the |size| argument.
  memory_ = map_->Begin();
  size_ = map_->Size();
}
191
// Out-of-line so the MemMap destructor is instantiated here.
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}
195
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700196void MemMapArena::Release() {
197 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700198 map_->MadviseDontNeedAndZero();
199 bytes_allocated_ = 0;
200 }
201}
202
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700203void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700204 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700205 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700206 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700207 }
buzbee862a7602013-04-05 10:58:54 -0700208}
209
// Creates a pool that hands out MallocArena (if |use_malloc|) or MemMapArena
// instances. |low_4gb| restricts map-backed arenas to the low 4 GiB and is
// incompatible with the malloc implementation.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // Make sure the MemMap subsystem is initialized before any arena is mapped.
    MemMap::Init();
  }
}
220
221ArenaPool::~ArenaPool() {
222 while (free_arenas_ != nullptr) {
223 auto* arena = free_arenas_;
224 free_arenas_ = free_arenas_->next_;
225 delete arena;
226 }
227}
228
// Returns an arena with at least |size| bytes of backing storage, reusing a
// cached one when possible. Note that only the head of the free list is
// examined; if its capacity is insufficient, a brand new arena is created
// even if a larger one sits deeper in the list.
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    // Keep the critical section minimal: only the list pop is under the lock.
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  // Zero out any previous contents before handing the arena to the caller.
  ret->Reset();
  return ret;
}
246
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700247void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700248 if (!use_malloc_) {
249 // Doesn't work for malloc.
250 MutexLock lock(Thread::Current(), lock_);
251 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
252 arena->Release();
253 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700254 }
255}
256
Mathieu Chartier49285c52014-12-02 15:43:48 -0800257size_t ArenaPool::GetBytesAllocated() const {
258 size_t total = 0;
259 MutexLock lock(Thread::Current(), lock_);
260 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
261 total += arena->GetBytesAllocated();
262 }
263 return total;
264}
265
// Returns a whole chain of arenas (linked through next_) to the free list.
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Make the used portions addressable again, so that the later zeroing /
    // madvise of recycled arenas does not trip the memory tool.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Find the tail so the whole chain can be spliced onto the free list
    // with a single pointer update under the lock.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
283
// Total bytes requested through this allocator, as tracked by the stats
// mixin (may be a constant zero depending on the ArenaAllocatorStats
// configuration — see ArenaAllocatorStatsImpl).
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
287
Mathieu Chartierc7853442015-03-27 14:35:38 -0700288size_t ArenaAllocator::BytesUsed() const {
289 size_t total = ptr_ - begin_;
290 if (arena_head_ != nullptr) {
291 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
292 cur_arena = cur_arena->next_) {
293 total += cur_arena->GetBytesAllocated();
294 }
295 }
296 return total;
297}
298
// Creates an allocator that draws arenas from |pool|. No arena is acquired
// until the first allocation; all bump-pointer state starts null.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr) {
}
306
307void ArenaAllocator::UpdateBytesAllocated() {
308 if (arena_head_ != nullptr) {
309 // Update how many bytes we have allocated into the arena so that the arena pool knows how
310 // much memory to zero out.
311 arena_head_->bytes_allocated_ = ptr_ - begin_;
312 }
313}
314
// Allocation slow path used under a memory tool: pads each allocation with a
// red zone and tells the tool exactly which bytes the caller may touch.
void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // Reserve room for the red zone and keep 8-byte alignment.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    // Fresh arena space is "allocated but uninitialized" from the tool's view
    // until individual allocations mark their bytes defined below.
    MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Only the bytes the caller asked for are usable; the rounding/red-zone
  // padding behind them becomes no-access so overruns are reported.
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
334
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  // Flush the head arena's usage first so the pool zeroes the right prefix.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
340
341void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
342 UpdateBytesAllocated();
343 Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
344 new_arena->next_ = arena_head_;
345 arena_head_ = new_arena;
346 // Update our internal data structures.
347 ptr_ = begin_ = new_arena->Begin();
348 end_ = new_arena->End();
349}
350
Mathieu Chartiere401d142015-04-22 13:56:20 -0700351bool ArenaAllocator::Contains(const void* ptr) const {
352 if (ptr >= begin_ && ptr < end_) {
353 return true;
354 }
355 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
356 if (cur_arena->Contains(ptr)) {
357 return true;
358 }
359 }
360 return false;
361}
362
// Snapshot object for deferred stat dumping. All pointers are borrowed:
// |name|, |stats| and the arena chain must outlive this MemStats.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
370
// Writes the captured statistics to |os| under this snapshot's name.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
375
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ may be stale; (end_ - ptr_) is its true
  // remaining space, so the difference corrects the "lost" byte count that
  // Dump() would otherwise compute from RemainingSpace().
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
382
383} // namespace art