/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "systrace.h"
#include "thread-inl.h"

namespace art {

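// Number of red zone bytes appended to each allocation when running on a memory
// tool, so that out-of-bounds accesses land in memory marked as inaccessible
// (see AllocWithMemoryTool below).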
static constexpr size_t kMemoryToolRedZoneBytes = 8;
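// Out-of-line definition required so that Arena::kDefaultSize can be ODR-used.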
constexpr size_t Arena::kDefaultSize;

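// Human-readable name for each ArenaAllocKind, used by
// ArenaAllocatorStatsImpl::Dump(). The names are padded to a common width so
// that the per-kind byte counts line up in the dump.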
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
  "GraphChecker ",
  "LSE          ",
  "Verifier     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

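// Dumps overall usage (used/allocated/lost bytes) for the given arena chain,
// followed by a per-kind breakdown of the recorded allocations.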
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

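// Out-of-line wrappers around the memory tool annotation macros.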
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

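// MallocArena backs the arena with calloc()ed memory, which is already zeroed.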
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

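// MemMapArena backs the arena with an anonymous memory mapping, optionally
// placed in the low 4GiB of the address space.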
MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

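// The pool keeps a singly-linked list of recycled arenas, protected by lock_.
// The map-based arena implementation is required when low_4gb is requested.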
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

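// Returns a recycled arena if the head of the free list is large enough,
// otherwise allocates a fresh arena of the requested size.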
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

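// Madvises the free arenas' memory back to the OS (map-based pools only).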
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

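// A minimal usage sketch (illustrative only; the calls follow the declarations
// in arena_allocator.h, but the concrete argument values are assumptions):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "example pool");
//   {
//     ArenaAllocator allocator(&pool);
//     // Allocations are zero-initialized and 8-byte aligned; they are never
//     // freed individually.
//     void* buffer = allocator.Alloc(64, kArenaAllocMisc);
//     // ... use buffer ...
//   }  // ~ArenaAllocator() hands the whole arena chain back to the pool.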
void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    CHECK(ptr_ != nullptr);
    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

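// Returns whether ptr lies within memory owned by this allocator: the currently
// open arena is checked first, then the rest of the chain.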
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art