/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"

namespace art {

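// Extra bytes added after each allocation when running under a memory tool; the
// padding stays marked inaccessible and serves as a red zone to catch overruns.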
static constexpr size_t kMemoryToolRedZoneBytes = 8;
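// Out-of-line definition, required because the in-class constant is ODR-used
// (e.g. passed by reference to std::max() below).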
constexpr size_t Arena::kDefaultSize;
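// Human-readable names for each allocation kind, indexed by ArenaAllocKind and
// used by Dump() below; Dump() static_asserts that the array size matches the enum.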
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

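// Out-of-line hooks that simply forward to the memory tool annotation macros.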
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

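// Note: calloc() returns zeroed memory, establishing the invariant that unused
// arena memory is zero (Reset() re-establishes it; AllocWithMemoryTool checks it).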
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

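// Give the arena's pages back to the kernel while keeping the mapping itself;
// the anonymous pages read as zero if they are touched again.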
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

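// Prepare a (possibly recycled) arena for reuse: zero out whatever the previous
// user allocated so the zeroed-memory invariant holds again.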
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

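// Reuse the arena at the head of the free list if it is large enough, otherwise
// allocate a fresh one. The returned arena is always Reset() before use.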
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

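// Release the pages of all free map-backed arenas back to the kernel; this is
// a no-op when the pool is malloc-based.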
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

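// Prepend an entire chain of arenas to the free list with a single lock
// acquisition. Under a memory tool, the used memory is re-marked undefined first.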
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

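// Total bytes handed out: the used portion of the current arena plus the
// recorded usage of every previously filled arena in the chain.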
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

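// Retire the current arena and make a new one, of at least kDefaultSize bytes,
// the head of the chain.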
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Collect this allocator's memory usage stats; MemStats::Dump() does the printing.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art