/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

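// Under Valgrind, every allocation is followed by a small red zone that AllocValgrind()
// marks NOACCESS, so reads or writes just past an allocation are reported.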
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

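// Human-readable names for each ArenaAllocKind, indexed by the enum value (the
// static_assert in Dump() keeps the two in sync). The names are padded to a common
// width so the per-kind table printed by Dump() lines up.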
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "BBList     ",
  "BBPreds    ",
  "DfsPreOrd  ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd    ",
  "Lowering   ",
  "LIR        ",
  "LIR masks  ",
  "SwitchTbl  ",
  "FillArray  ",
  "SlowPaths  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

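// When kCount is true, RecordAlloc() accumulates the total number of allocations and
// the bytes requested per ArenaAllocKind; only the variant selected by
// kArenaAllocatorCountAllocations is instantiated (see the explicit instantiation below).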
template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

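// Prints a summary for the arena chain starting at `first`: "used" is the sum of the
// per-kind counters, "allocated" is the total backing memory of all arenas, and
// "lost" is space sitting unused at the tail of each arena.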
92template <bool kCount>
93void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
94 ssize_t lost_bytes_adjustment) const {
95 size_t malloc_bytes = 0u;
96 size_t lost_bytes = 0u;
97 size_t num_arenas = 0u;
98 for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
99 malloc_bytes += arena->Size();
100 lost_bytes += arena->RemainingSpace();
101 ++num_arenas;
102 }
103 // The lost_bytes_adjustment is used to make up for the fact that the current arena
104 // may not have the bytes_allocated_ updated correctly.
105 lost_bytes += lost_bytes_adjustment;
106 const size_t bytes_allocated = BytesAllocated();
107 os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
108 << ", lost: " << lost_bytes << "\n";
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000109 size_t num_allocations = NumAllocations();
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000110 if (num_allocations != 0) {
111 os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
112 << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
113 }
114 os << "===== Allocation by kind\n";
Andreas Gampe785d2f22014-11-03 22:57:30 -0800115 static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000116 for (int i = 0; i < kNumArenaAllocKinds; i++) {
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000117 os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000118 }
119}
120
121// Explicitly instantiate the used implementation.
122template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
123
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700124Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700125}
126
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700127MallocArena::MallocArena(size_t size) {
128 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
129 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700130}
131
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700132MallocArena::~MallocArena() {
133 free(reinterpret_cast<void*>(memory_));
134}
135
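// An arena backed by an anonymous memory mapping rather than malloc. Only this
// implementation can honor the low_4gb request (a mapping in the low 4GiB of the
// address space), which is why ArenaPool's constructor rejects use_malloc with low_4gb.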
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

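// Reset() zeroes only the bytes that were handed out, so a reused arena always starts
// out zero-filled. MemMapArena::Release() above goes further: it returns the dirty
// pages to the kernel while keeping the mapping itself alive.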
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

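// A pool of reusable arenas, kept on a singly linked free list guarded by lock_.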
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

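// Hands out an arena of at least `size` bytes. Only the head of the free list is
// examined: it is reused if large enough, otherwise a fresh arena is allocated. The
// returned arena has been Reset(), so its usable memory reads as zero.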
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

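// Returns the pages of all map-backed free arenas to the kernel; malloc-backed arenas
// have no mapping to trim, so this is a no-op when the pool uses malloc.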
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

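// Takes back a whole chain of arenas with a single lock acquisition by splicing the
// chain onto the front of the free list. Under Valgrind, the returned memory is first
// marked undefined so later reads through dangling pointers are reported.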
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

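// A bump-pointer allocator that carves allocations out of arenas obtained from the
// pool; the whole chain is returned to the pool on destruction. A minimal usage
// sketch, assuming the inline Alloc() entry point declared in arena_allocator.h:
//
//   ArenaPool pool(/* use_malloc */ true, /* low_4gb */ false);
//   {
//     ArenaAllocator allocator(&pool);
//     void* p = allocator.Alloc(num_bytes, kArenaAllocMisc);
//     // ... p stays valid until the allocator is destroyed ...
//   }  // ~ArenaAllocator() returns all arenas to the pool for reuse.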
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

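// Slow-path allocation used when running under Valgrind. The request is padded with a
// red zone and rounded up to an 8-byte boundary; the padding is marked NOACCESS so
// that accesses past the requested `bytes` are reported.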
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

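// Fetches an arena from the pool that is large enough for the pending allocation and
// makes it the new head of the chain. Any space left at the tail of the previous
// arena is abandoned; it shows up as "lost" bytes in the MemStats dump.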
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

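// Linear scan over the arena chain, checking the currently active region first. This
// is O(number of arenas), so it is best suited to debug-style validation.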
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats. The adjustment compensates for the head arena, whose
// bytes_allocated_ is only updated lazily: its actual free space is end_ - ptr_
// rather than the possibly stale RemainingSpace().
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art