/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "base/memory_tool.h"

namespace art {

static constexpr size_t kMemoryToolRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
  "GVN          ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "PrimTypeProp ",
  "SideEffects  ",
  "RegAllocator ",
  "StackMapStm  ",
  "CodeGen      ",
  "ParallelMove ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment compensates for the current arena, whose
  // bytes_allocated_ may not have been updated yet.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
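
// For reference, a Dump() call produces output shaped roughly like this
// (the numbers are illustrative, not taken from a real compile):
//
//    MEM: used: 61440, allocated: 131072, lost: 2048
//   Number of arenas allocated: 2, Number of allocations: 512, avg size: 120
//   ===== Allocation by kind
//   Misc              4096
//   BBList               0
//   ...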

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys the MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}
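
// Reset() and Release() recycle an arena in different ways: Reset() keeps the
// memory resident and zeroes only the used prefix, so the arena is immediately
// reusable, while Release() (MemMap-backed arenas only) hands the pages back
// to the kernel via madvise(). A sketch of the intended flow, assuming a
// map-backed pool (illustrative only; not called anywhere in this file):
//
//   ArenaPool pool(/* use_malloc= */ false, /* low_4gb= */ false);
//   Arena* arena = pool.AllocArena(Arena::kDefaultSize);
//   ...fill the arena...
//   pool.FreeArenaChain(arena);  // Back on the pool's free list.
//   pool.TrimMaps();             // Release(): pages returned to the kernel.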

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low_4gb requires the MemMap implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}
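
// Reuse only considers the head of the free list: if the first free arena is
// large enough it is recycled, otherwise a new arena is created even if a
// big-enough one sits further down the list. Since most requests are for
// exactly Arena::kDefaultSize (see ObtainNewArenaForAllocation below), this
// first-fit-on-head check is almost always a hit and keeps the critical
// section O(1).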

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Releasing pages via madvise() works only for MemMap-backed arenas,
    // not for malloc()ed memory.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

// Total bytes allocated into the arenas currently sitting on the free list.
size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Un-poison the used ranges so the recycled arenas can be handed out
    // again; the memory tool treats the contents as undefined from here on.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  // For the head arena use the live ptr_/begin_ cursor, since its
  // bytes_allocated_ is only written back in UpdateBytesAllocated().
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the
    // arena pool knows how much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Slow path used when running under a memory tool (Valgrind/ASan): pad each
  // allocation with a red zone and keep the padding inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
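
// Worked example of the red-zone layout: a request of bytes = 20 yields
// rounded_bytes = RoundUp(20 + kMemoryToolRedZoneBytes, 8) = RoundUp(28, 8) = 32.
// The caller may touch bytes [0, 20); the trailing bytes [20, 32), i.e. the
// rounding slack plus the red zone, are marked NOACCESS so that the memory
// tool reports any overflow into the next allocation.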

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Collect memory usage statistics that can be dumped with MemStats::Dump().
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
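
// A minimal end-to-end sketch of how a client drives these classes: create a
// pool, carve allocations out of an allocator, and dump the statistics.
// ArenaAllocator::Alloc() is declared inline in arena_allocator.h; the usage
// below is an assumed example and is not referenced anywhere in ART.
static void ExampleArenaAllocatorUsage(std::ostream& os) __attribute__((unused));
static void ExampleArenaAllocatorUsage(std::ostream& os) {
  ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false);
  {
    ArenaAllocator allocator(&pool);
    // Carve 1024 zero-initialized bytes out of the current arena; a fresh
    // arena is fetched from the pool transparently when the current one fills.
    void* block = allocator.Alloc(1024, kArenaAllocMisc);
    CHECK(allocator.Contains(block));
    allocator.GetMemStats().Dump(os);  // Prints "ArenaAllocator stats: ...".
  }  // ~ArenaAllocator() returns the whole arena chain to the pool for reuse.
}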

}  // namespace art