blob: 337df382b9f32ca154a96c373e0753d0a8be5061 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
Evgenii Stepanov1e133742015-05-20 12:30:59 -070026#include "base/memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
// Bytes of padding appended to each memory-tool allocation and poisoned in
// AllocValgrind() so the tool flags reads/writes past the requested size.
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-line definition for the class-scope constant; required (pre-C++17)
// when Arena::kDefaultSize is odr-used, e.g. by std::max in
// ArenaAllocator::ObtainNewArenaForAllocation().
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070032
// Human-readable label for each allocation kind, used by Dump() when printing
// per-kind statistics. Entry order MUST match the ArenaAllocKind enumeration;
// the static_assert in Dump() verifies the two sizes agree. Labels are padded
// so the std::setw(10) counts in Dump() line up in columns.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc         ",
  "BBList       ",
  "BBPreds      ",
  "DfsPreOrd    ",
  "DfsPostOrd   ",
  "DomPostOrd   ",
  "TopoOrd      ",
  "Lowering     ",
  "LIR          ",
  "LIR masks    ",
  "SwitchTbl    ",
  "FillArray    ",
  "SlowPaths    ",
  "MIR          ",
  "DataFlow     ",
  "GrowList     ",
  "GrowBitMap   ",
  "SSA2Dalvik   ",
  "Dalvik2SSA   ",
  "DebugInfo    ",
  "RegAlloc     ",
  "Data         ",
  "STL          ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "BaselineMaps ",
  "Optimization ",
};
83
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000084template <bool kCount>
85ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
86 : num_allocations_(0u) {
87 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
88}
89
90template <bool kCount>
91void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
92 num_allocations_ = other.num_allocations_;
93 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
94}
95
96template <bool kCount>
97void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
98 alloc_stats_[kind] += bytes;
99 ++num_allocations_;
100}
101
// Number of individual allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
106
107template <bool kCount>
108size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
109 const size_t init = 0u; // Initial value of the correct type.
110 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
111}
112
113template <bool kCount>
114void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
115 ssize_t lost_bytes_adjustment) const {
116 size_t malloc_bytes = 0u;
117 size_t lost_bytes = 0u;
118 size_t num_arenas = 0u;
119 for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
120 malloc_bytes += arena->Size();
121 lost_bytes += arena->RemainingSpace();
122 ++num_arenas;
123 }
124 // The lost_bytes_adjustment is used to make up for the fact that the current arena
125 // may not have the bytes_allocated_ updated correctly.
126 lost_bytes += lost_bytes_adjustment;
127 const size_t bytes_allocated = BytesAllocated();
128 os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
129 << ", lost: " << lost_bytes << "\n";
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000130 size_t num_allocations = NumAllocations();
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000131 if (num_allocations != 0) {
132 os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
133 << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
134 }
135 os << "===== Allocation by kind\n";
Andreas Gampe785d2f22014-11-03 22:57:30 -0800136 static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000137 for (int i = 0; i < kNumArenaAllocKinds; i++) {
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000138 os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000139 }
140}
141
// Explicitly instantiate the used implementation so the template definitions
// above need not live in a header; kArenaAllocatorCountAllocations selects
// whether counting is compiled in.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
144
// An arena starts empty and unlinked; the MallocArena/MemMapArena subclass
// constructors establish memory_ and size_.
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
147
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700148MallocArena::MallocArena(size_t size) {
149 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
150 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700151}
152
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700153MallocArena::~MallocArena() {
154 free(reinterpret_cast<void*>(memory_));
155}
156
Mathieu Chartierc7853442015-03-27 14:35:38 -0700157MemMapArena::MemMapArena(size_t size, bool low_4gb) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700158 std::string error_msg;
Mathieu Chartierc7853442015-03-27 14:35:38 -0700159 map_.reset(MemMap::MapAnonymous(
160 "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700161 CHECK(map_.get() != nullptr) << error_msg;
162 memory_ = map_->Begin();
163 size_ = map_->Size();
164}
165
// Out-of-line so the std::unique_ptr<MemMap> member can be destroyed where
// MemMap is a complete type. Destroys the mapping via the unique_ptr.
MemMapArena::~MemMapArena() {
}
169
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700170void MemMapArena::Release() {
171 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700172 map_->MadviseDontNeedAndZero();
173 bytes_allocated_ = 0;
174 }
175}
176
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700177void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700178 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700179 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700180 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700181 }
buzbee862a7602013-04-05 10:58:54 -0700182}
183
Mathieu Chartierc7853442015-03-27 14:35:38 -0700184ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
185 : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
186 low_4gb_(low_4gb) {
187 if (low_4gb) {
188 CHECK(!use_malloc) << "low4gb must use map implementation";
189 }
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700190 if (!use_malloc) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700191 MemMap::Init();
192 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700193}
194
195ArenaPool::~ArenaPool() {
196 while (free_arenas_ != nullptr) {
197 auto* arena = free_arenas_;
198 free_arenas_ = free_arenas_->next_;
199 delete arena;
200 }
201}
202
203Arena* ArenaPool::AllocArena(size_t size) {
204 Thread* self = Thread::Current();
205 Arena* ret = nullptr;
206 {
207 MutexLock lock(self, lock_);
208 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
209 ret = free_arenas_;
210 free_arenas_ = free_arenas_->next_;
211 }
212 }
213 if (ret == nullptr) {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700214 ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
215 new MemMapArena(size, low_4gb_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700216 }
217 ret->Reset();
218 return ret;
219}
220
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700221void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700222 if (!use_malloc_) {
223 // Doesn't work for malloc.
224 MutexLock lock(Thread::Current(), lock_);
225 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
226 arena->Release();
227 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700228 }
229}
230
Mathieu Chartier49285c52014-12-02 15:43:48 -0800231size_t ArenaPool::GetBytesAllocated() const {
232 size_t total = 0;
233 MutexLock lock(Thread::Current(), lock_);
234 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
235 total += arena->GetBytesAllocated();
236 }
237 return total;
238}
239
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000240void ArenaPool::FreeArenaChain(Arena* first) {
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700241 if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000242 for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700243 MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000244 }
Mathieu Chartier75165d02013-09-12 14:00:31 -0700245 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000246 if (first != nullptr) {
247 Arena* last = first;
248 while (last->next_ != nullptr) {
249 last = last->next_;
250 }
251 Thread* self = Thread::Current();
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700252 MutexLock lock(self, lock_);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000253 last->next_ = free_arenas_;
254 free_arenas_ = first;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700255 }
256}
257
// Total bytes recorded by the statistics base class (see
// ArenaAllocatorStatsImpl<kCount>::BytesAllocated()).
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
261
Mathieu Chartierc7853442015-03-27 14:35:38 -0700262size_t ArenaAllocator::BytesUsed() const {
263 size_t total = ptr_ - begin_;
264 if (arena_head_ != nullptr) {
265 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
266 cur_arena = cur_arena->next_) {
267 total += cur_arena->GetBytesAllocated();
268 }
269 }
270 return total;
271}
272
// The allocator starts with no arena; the first allocation triggers
// ObtainNewArenaForAllocation(), which fetches one from `pool`.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
281
282void ArenaAllocator::UpdateBytesAllocated() {
283 if (arena_head_ != nullptr) {
284 // Update how many bytes we have allocated into the arena so that the arena pool knows how
285 // much memory to zero out.
286 arena_head_->bytes_allocated_ = ptr_ - begin_;
287 }
288}
289
Mathieu Chartier75165d02013-09-12 14:00:31 -0700290void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700291 size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700292 if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
293 // Obtain a new block.
294 ObtainNewArenaForAllocation(rounded_bytes);
295 if (UNLIKELY(ptr_ == nullptr)) {
296 return nullptr;
297 }
298 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000299 ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700300 uint8_t* ret = ptr_;
301 ptr_ += rounded_bytes;
302 // Check that the memory is already zeroed out.
303 for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
304 CHECK_EQ(*ptr, 0U);
305 }
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700306 MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700307 return ret;
308}
309
ArenaAllocator::~ArenaAllocator() {
  // Record the final usage of the current arena, then return the whole arena
  // chain to the pool for reuse (FreeArenaChain handles a null head).
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
315
316void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
317 UpdateBytesAllocated();
318 Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
319 new_arena->next_ = arena_head_;
320 arena_head_ = new_arena;
321 // Update our internal data structures.
322 ptr_ = begin_ = new_arena->Begin();
323 end_ = new_arena->End();
324}
325
Mathieu Chartiere401d142015-04-22 13:56:20 -0700326bool ArenaAllocator::Contains(const void* ptr) const {
327 if (ptr >= begin_ && ptr < end_) {
328 return true;
329 }
330 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
331 if (cur_arena->Contains(ptr)) {
332 return true;
333 }
334 }
335 return false;
336}
337
// Bundles a name, a stats snapshot, and an arena chain for later dumping.
// `lost_bytes_adjustment` is forwarded to ArenaAllocatorStatsImpl::Dump()
// to correct for the current arena's unflushed bytes_allocated_.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
345
// Print the named header, then delegate the detailed breakdown to the stats.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
350
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700351// Dump memory usage stats.
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000352MemStats ArenaAllocator::GetMemStats() const {
353 ssize_t lost_bytes_adjustment =
354 (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
355 return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
buzbee862a7602013-04-05 10:58:54 -0700356}
357
358} // namespace art