/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include "base/memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
// Size of the red zone appended to each allocation when running under a
// memory tool (see AllocValgrind); poisoned so overruns are detected.
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-class definition for the in-class constant (required for ODR-use).
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070032
// Human-readable labels for each allocation kind, indexed by ArenaAllocKind.
// The order must match the ArenaAllocKind enum declaration; Dump() enforces
// the array length with a static_assert against kNumArenaAllocKinds.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "RegAlloc ",
  "Data ",
  "STL ",
  "GraphBuilder ",
  "Graph ",
  "BasicBlock ",
  "BlockList ",
  "RevPostOrder ",
  "LinearOrder ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors ",
  "Dominated ",
  "Instruction ",
  "InvokeInputs ",
  "PhiInputs ",
  "LoopInfo ",
  "LIBackEdges ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "EnvVRegs ",
  "EnvLocations ",
  "LocSummary ",
  "SsaBuilder ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
  "GVN ",
  "SsaLiveness ",
  "SsaPhiElim ",
  "RefTypeProp ",
  "PrimTypeProp ",
  "SideEffects ",
  "RegAllocator ",
};
93
// Start with zero allocations and zeroed per-kind byte counters.
template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}
99
100template <bool kCount>
101void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
102 num_allocations_ = other.num_allocations_;
103 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
104}
105
// Accounts |bytes| against the given allocation kind and bumps the total
// allocation count. (In the kCount == false specialization declared elsewhere
// this is expected to be a no-op; only the counting instantiation is visible here.)
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}
111
// Returns the number of allocations recorded so far.
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
116
117template <bool kCount>
118size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
119 const size_t init = 0u; // Initial value of the correct type.
120 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
121}
122
// Dumps usage statistics to |os|: overall used/allocated/lost bytes for the
// arena chain starting at |first|, then a per-kind breakdown.
// |lost_bytes_adjustment| corrects for the head arena, whose bytes_allocated_
// may be stale (it is only synced in ArenaAllocator::UpdateBytesAllocated()).
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  // Guards the name table against drifting out of sync with ArenaAllocKind.
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
151
// Explicitly instantiate the used implementation so the template member
// definitions above are emitted in this translation unit.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
154
// Base-class constructor: no bytes handed out yet, not linked into any chain.
// memory_/size_ are set by the concrete subclass (MallocArena / MemMapArena).
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
157
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700158MallocArena::MallocArena(size_t size) {
159 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
160 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700161}
162
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700163MallocArena::~MallocArena() {
164 free(reinterpret_cast<void*>(memory_));
165}
166
// Arena backed by an anonymous mmap. |low_4gb| requests placement in the low
// 4GiB of address space (needed by 32-bit-pointer consumers).
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  // Use the map's actual size: it may exceed the request due to page rounding.
  size_ = map_->Size();
}
175
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}
179
// Returns the arena's physical pages to the OS (madvise) while keeping the
// virtual mapping, and resets the usage counter. Only meaningful for the
// mmap-backed implementation; the base-class/malloc version has no override here.
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    // MadviseDontNeedAndZero also re-zeroes, preserving the "arena memory is
    // zero" invariant that Arena::Reset() and AllocValgrind() rely on.
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}
186
// Prepares the arena for reuse: zero out only the portion that was handed out
// (allocation assumes zeroed memory) and reset the usage counter.
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}
193
// Pool of reusable arenas. |use_malloc| selects MallocArena over MemMapArena;
// |low_4gb| forces low-address mappings and is only valid with the map backend.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // Ensure the MemMap subsystem is initialized before any arena is mapped.
    MemMap::Init();
  }
}
204
205ArenaPool::~ArenaPool() {
206 while (free_arenas_ != nullptr) {
207 auto* arena = free_arenas_;
208 free_arenas_ = free_arenas_->next_;
209 delete arena;
210 }
211}
212
// Hands out an arena of at least |size| bytes. Reuses the head of the free
// list when it is large enough; otherwise allocates a fresh arena of the
// requested backend type.
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    // Keep the critical section minimal: only the free-list pop is locked;
    // new-arena construction below happens outside the lock.
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  // Zero the handed-out region and accounting before reuse.
  ret->Reset();
  return ret;
}
230
// Releases the physical pages of all free map-backed arenas back to the OS.
// No-op for the malloc backend, which has no pages to madvise away.
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}
240
Mathieu Chartier49285c52014-12-02 15:43:48 -0800241size_t ArenaPool::GetBytesAllocated() const {
242 size_t total = 0;
243 MutexLock lock(Thread::Current(), lock_);
244 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
245 total += arena->GetBytesAllocated();
246 }
247 return total;
248}
249
// Returns a whole chain of arenas to the free list in one splice.
// |first| may be nullptr (empty chain), in which case this is a no-op.
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Mark the used portion of each arena undefined so the memory tool flags
    // any read of this now-freed data by stale pointers.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Find the tail, then splice the entire chain onto the free list with a
    // single lock acquisition.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
267
// Total bytes recorded by the allocation statistics (includes rounding,
// not just the bytes callers asked for).
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
271
// Bytes actually consumed: the live portion of the head arena (ptr_ - begin_,
// which may not yet be flushed to arena_head_->bytes_allocated_) plus the
// recorded usage of every older arena in the chain.
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    // Skip the head itself; its usage is counted via ptr_/begin_ above.
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}
282
// Starts with no arena; the first allocation triggers
// ObtainNewArenaForAllocation(). The memory-tool flag is sampled once here so
// the Alloc fast path can branch on a plain bool.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
291
void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}
299
// Allocation path used when running under a memory tool (Valgrind/ASan):
// each allocation is padded with a poisoned red zone so that reads/writes
// past the requested |bytes| are reported. Returns nullptr on failure.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Reserve the red zone after the user bytes; keep 8-byte alignment.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Poison only the red-zone tail; the first |bytes| stay accessible.
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
319
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // Sync the head arena's usage first so the pool zeroes the right amount.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
325
// Fetches a new arena from the pool, large enough for |allocation_size|
// (at least kDefaultSize), pushes it as the new head and points the bump
// pointer at it. Note: remaining space in the old head is abandoned ("lost").
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  // Flush the old head's usage before it stops being the active arena.
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}
335
Mathieu Chartiere401d142015-04-22 13:56:20 -0700336bool ArenaAllocator::Contains(const void* ptr) const {
337 if (ptr >= begin_ && ptr < end_) {
338 return true;
339 }
340 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
341 if (cur_arena->Contains(ptr)) {
342 return true;
343 }
344 }
345 return false;
346}
347
// Snapshot object for dumping allocator statistics. Stores only pointers:
// |stats| and |first_arena| must outlive this MemStats instance.
// NOTE(review): lifetime is caller's responsibility — not enforced here.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
355
// Writes a titled statistics report to |os| via ArenaAllocatorStatsImpl::Dump.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
360
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ may be stale (only synced by
  // UpdateBytesAllocated()); compute the correction from the live bump pointer.
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
367
368} // namespace art