blob: 3a4bccd94c07a0158f2f9f7d1c0fe663cf9971f6 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
Evgenii Stepanov1e133742015-05-20 12:30:59 -070026#include "base/memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
// Number of bytes reserved after each allocation as a red zone when running
// under a memory tool (Valgrind/ASan); see ArenaAllocator::AllocValgrind().
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-class definition for the in-class constant so ODR-uses link.
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070032
// Human-readable labels for each ArenaAllocKind, used by Dump() for the
// per-kind byte breakdown. Indexed by ArenaAllocKind; must stay in sync with
// that enum — Dump() enforces the length with a static_assert.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "Successor ",
  "RegAlloc ",
  "Data ",
  "Preds ",
  "STL ",
  "Graph ",
  "BasicBlock ",
  "Instruction ",
  "LoopInfo ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
};
73
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000074template <bool kCount>
75ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
76 : num_allocations_(0u) {
77 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
78}
79
80template <bool kCount>
81void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
82 num_allocations_ = other.num_allocations_;
83 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
84}
85
86template <bool kCount>
87void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
88 alloc_stats_[kind] += bytes;
89 ++num_allocations_;
90}
91
// Returns the total number of allocations recorded so far.
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
96
97template <bool kCount>
98size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
99 const size_t init = 0u; // Initial value of the correct type.
100 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
101}
102
// Dumps usage statistics to |os|: overall used/allocated/lost byte counts,
// arena and allocation counts, and a per-kind byte breakdown.
// |first| is the head of the arena chain to summarize; |lost_bytes_adjustment|
// corrects for the current arena, whose bytes_allocated_ may be stale (see
// the comment below and ArenaAllocator::GetMemStats()).
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;  // Total backing storage across all arenas.
  size_t lost_bytes = 0u;    // Unused space at the tail of each arena.
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  // Catch kAllocNames drifting out of sync with the ArenaAllocKind enum.
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
131
// Explicitly instantiate the used implementation so the member definitions
// above are emitted in this translation unit.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
134
// An arena starts empty and unlinked; subclasses set up memory_ and size_.
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
137
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700138MallocArena::MallocArena(size_t size) {
139 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
140 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700141}
142
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700143MallocArena::~MallocArena() {
144 free(reinterpret_cast<void*>(memory_));
145}
146
// Creates an arena backed by an anonymous read/write memory mapping.
// |low_4gb| requests placement in the low 4GiB of address space (needed by
// callers that store 32-bit pointers into this memory — see ArenaPool).
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  // Take the size from the map rather than |size| — presumably the map may be
  // rounded up (e.g. to page granularity); the map's value is authoritative.
  size_ = map_->Size();
}
155
// Out-of-line so the MemMap destructor is only required here.
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}
159
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700160void MemMapArena::Release() {
161 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700162 map_->MadviseDontNeedAndZero();
163 bytes_allocated_ = 0;
164 }
165}
166
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700167void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700168 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700169 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700170 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700171 }
buzbee862a7602013-04-05 10:58:54 -0700172}
173
// |use_malloc| selects MallocArena backing; otherwise MemMapArena is used.
// |low_4gb| restricts map-backed arenas to the low 4GiB and is therefore
// incompatible with the malloc implementation.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // Make sure the MemMap subsystem is initialized before any arena is mapped.
    MemMap::Init();
  }
}
184
185ArenaPool::~ArenaPool() {
186 while (free_arenas_ != nullptr) {
187 auto* arena = free_arenas_;
188 free_arenas_ = free_arenas_->next_;
189 delete arena;
190 }
191}
192
// Hands out an arena of at least |size| bytes. Reuses the head of the free
// list when it is large enough, otherwise creates a fresh arena.
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    // Only the free-list pop happens under the lock; creating a new arena
    // below is done outside the critical section.
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  // A recycled arena may hold stale data; Reset() zeroes the used portion.
  ret->Reset();
  return ret;
}
210
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700211void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700212 if (!use_malloc_) {
213 // Doesn't work for malloc.
214 MutexLock lock(Thread::Current(), lock_);
215 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
216 arena->Release();
217 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700218 }
219}
220
Mathieu Chartier49285c52014-12-02 15:43:48 -0800221size_t ArenaPool::GetBytesAllocated() const {
222 size_t total = 0;
223 MutexLock lock(Thread::Current(), lock_);
224 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
225 total += arena->GetBytesAllocated();
226 }
227 return total;
228}
229
// Returns an entire chain of arenas (linked via next_) to the pool's free
// list in a single splice. |first| may be null (no-op).
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Mark the handed-out bytes undefined so the memory tool flags any later
    // reads through dangling pointers into these arenas.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Walk to the tail of the chain, then prepend the whole chain to the
    // free list while holding the lock.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
247
// Total bytes this allocator has recorded across all allocation kinds;
// forwards to the (possibly stats-counting) base implementation.
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
251
Mathieu Chartierc7853442015-03-27 14:35:38 -0700252size_t ArenaAllocator::BytesUsed() const {
253 size_t total = ptr_ - begin_;
254 if (arena_head_ != nullptr) {
255 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
256 cur_arena = cur_arena->next_) {
257 total += cur_arena->GetBytesAllocated();
258 }
259 }
260 return total;
261}
262
// Creates an empty allocator; arenas are obtained lazily from |pool| via
// ObtainNewArenaForAllocation(). Whether a memory tool (Valgrind/ASan) is
// attached is sampled once here.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
271
272void ArenaAllocator::UpdateBytesAllocated() {
273 if (arena_head_ != nullptr) {
274 // Update how many bytes we have allocated into the arena so that the arena pool knows how
275 // much memory to zero out.
276 arena_head_->bytes_allocated_ = ptr_ - begin_;
277 }
278}
279
// Allocation path used when running under a memory tool. Pads each request
// with a red zone and poisons it so the tool catches buffer overruns.
// Returns null only if obtaining a new arena failed.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Reserve the red zone after the payload and round up to 8 bytes so every
  // returned pointer stays 8-byte aligned.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Poison everything past the caller-visible |bytes| (the red zone) so any
  // access beyond the requested size is reported.
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
299
// Reclaim all the arenas by giving them back to the arena pool.
// The head arena's bytes_allocated_ must be flushed first so the pool
// zeroes the correct amount when the arena is reused.
ArenaAllocator::~ArenaAllocator() {
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
305
// Pushes a fresh arena — at least kDefaultSize, or larger if the pending
// allocation of |allocation_size| bytes needs it — onto the head of the
// chain and repoints the bump pointer at it.
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  // Record the outgoing head arena's usage before it stops being current.
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}
315
Mathieu Chartiere401d142015-04-22 13:56:20 -0700316bool ArenaAllocator::Contains(const void* ptr) const {
317 if (ptr >= begin_ && ptr < end_) {
318 return true;
319 }
320 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
321 if (cur_arena->Contains(ptr)) {
322 return true;
323 }
324 }
325 return false;
326}
327
// Snapshot tying together a display name, the stats to report, and the arena
// chain they describe; consumed by MemStats::Dump() below.
// |lost_bytes_adjustment| is forwarded to ArenaAllocatorStatsImpl::Dump().
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
335
// Writes the named statistics summary to |os|.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
340
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ may be stale (it is only flushed by
  // UpdateBytesAllocated()), so derive the correction from the live bump
  // pointer: (end_ - ptr_) is the true remaining space in the head arena.
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
347
348} // namespace art