/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

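// Size of the red zone appended to each allocation when running under a
// memory tool (Valgrind/ASan); see AllocWithMemoryTool() below.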
static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

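// Human-readable names for each ArenaAllocKind, used by Dump(). The order must
// match the ArenaAllocKind enum; Dump() checks the count with a static_assert.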
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

130template <bool kCount>
131void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
132 ssize_t lost_bytes_adjustment) const {
133 size_t malloc_bytes = 0u;
134 size_t lost_bytes = 0u;
135 size_t num_arenas = 0u;
136 for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
137 malloc_bytes += arena->Size();
138 lost_bytes += arena->RemainingSpace();
139 ++num_arenas;
140 }
141 // The lost_bytes_adjustment is used to make up for the fact that the current arena
142 // may not have the bytes_allocated_ updated correctly.
143 lost_bytes += lost_bytes_adjustment;
144 const size_t bytes_allocated = BytesAllocated();
145 os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
146 << ", lost: " << lost_bytes << "\n";
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000147 size_t num_allocations = NumAllocations();
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000148 if (num_allocations != 0) {
149 os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
150 << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
151 }
152 os << "===== Allocation by kind\n";
Andreas Gampe785d2f22014-11-03 22:57:30 -0800153 static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000154 for (int i = 0; i < kNumArenaAllocKinds; i++) {
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000155 os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000156 }
157}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  // calloc() zero-initializes the block, establishing the arena invariant
  // that unallocated memory reads as zero.
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

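// Arena backed by an anonymous mmap. When low_4gb is set, the mapping is
// placed in the low 4 GiB of the address space.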
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

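// Return the arena's pages to the kernel via madvise() without unmapping
// them; the address range stays reserved for reuse.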
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    // Zero out only the used prefix so that a recycled arena again hands out
    // zeroed memory.
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

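// Typical usage, as a sketch only (real call sites live in the compiler and
// runtime; the constants below are illustrative):
//
//   ArenaPool pool(/* use_malloc */ true, /* low_4gb */ false);
//   {
//     ArenaAllocator allocator(&pool);
//     void* storage = allocator.Alloc(64, kArenaAllocMisc);
//     // ... use storage; there is no per-allocation free.
//   }  // ~ArenaAllocator() returns the whole arena chain to the pool.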
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

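// Hand out an arena: reuse the head of the free list if it is large enough,
// otherwise allocate a fresh arena. Either way the arena is Reset() so the
// caller sees zeroed memory.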
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages only works for the map-based implementation, not for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

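// Return a whole chain of arenas to the free list. Under a memory tool, the
// used ranges are first marked undefined so that stale reads get flagged.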
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

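// BytesAllocated() reports the bytes requested from this allocator (only
// meaningful when allocation counting is compiled in), while BytesUsed()
// walks the arena chain and reports the bytes actually consumed, including
// the partially filled head arena.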
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

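// The allocator starts with no arena; begin_/end_/ptr_ stay null until the
// first allocation triggers ObtainNewArenaForAllocation().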
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

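// MemStats ties a name and an arena chain to an ArenaAllocatorStats instance
// so that a report can be streamed out later via Dump().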
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art