/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "logging.h"
#include "mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

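// Size of the red zone placed after each Valgrind-instrumented allocation; AllocValgrind()
// marks these trailing bytes inaccessible so out-of-bounds reads and writes are reported.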
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "BBList     ",
  "BBPreds    ",
  "DfsPreOrd  ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd    ",
  "Lowering   ",
  "LIR        ",
  "LIR masks  ",
  "SwitchTbl  ",
  "FillArray  ",
  "SlowPaths  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

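// Dumps aggregate and per-kind statistics: "used" is the total recorded by RecordAlloc(),
// "allocated" is the memory reserved by the arenas, and "lost" is reserved but unused space.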
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}

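// MallocArena backs the arena with zero-initialized memory obtained from calloc().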
MallocArena::MallocArena(size_t size) {
  memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
  size_ = size;
}

MallocArena::~MallocArena() {
  free(reinterpret_cast<void*>(memory_));
}

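// MemMapArena backs the arena with an anonymous memory mapping, optionally restricted to the
// low 4GiB of the address space.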
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  size_ = map_->Size();
}

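// Returns this arena's dirty pages to the kernel and leaves the memory zeroed, so the arena
// can be reused without an explicit memset.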
void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

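// Prepares the arena for reuse by zeroing only the bytes that were actually handed out.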
void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

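// Reuses the arena at the head of the free list when it is large enough; otherwise a fresh
// arena is allocated. The pool lock is held only while the free list is inspected.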
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_);
  }
  ret->Reset();
  return ret;
}

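// Releases the pages of all free map-backed arenas back to the kernel; this is a no-op for
// malloc-backed pools, where individual pages cannot be released this way.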
void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

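// Returns a whole chain of arenas to the free list with a single lock acquisition. Under
// Valgrind, the freed ranges are first marked undefined so stale reads are reported.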
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

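// Computes the bytes actually handed out. The head arena is measured via ptr_ - begin_
// because its bytes_allocated_ is only synced to the bump pointer in UpdateBytesAllocated().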
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

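// Allocation path used when running under Valgrind: each request is padded with
// kValgrindRedZoneBytes and the padding is marked inaccessible, so reads or writes past the
// requested size are flagged as errors.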
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Collect memory usage statistics for dumping via MemStats::Dump(). The adjustment accounts
// for the head arena, whose bytes_allocated_ may lag behind the current bump pointer.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art