blob: 4e51f5555d5d363254bbfd04269e7b4c3a2784a5 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
Evgenii Stepanov1e133742015-05-20 12:30:59 -070026#include "base/memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070027
namespace art {

// Size of the red zone appended to each allocation in AllocValgrind() so a
// memory tool (Valgrind/ASan) can detect buffer overruns past the request.
static constexpr size_t kMemoryToolRedZoneBytes = 8;
// Out-of-class definition required for ODR-uses of the in-class constant.
constexpr size_t Arena::kDefaultSize;
// Human-readable labels printed by Dump() for each allocation kind.
// NOTE: entry order must match the ArenaAllocKind enumeration; the
// static_assert in Dump() verifies the array length against
// kNumArenaAllocKinds.
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "RegAlloc ",
  "Data ",
  "STL ",
  "Graph ",
  "BasicBlock ",
  "BlockList ",
  "RevPostOrder ",
  "LinearOrder ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors ",
  "Dominated ",
  "Instruction ",
  "InvokeInputs ",
  "PhiInputs ",
  "LoopInfo ",
  "LIBackEdges ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "EnvVRegs ",
  "EnvLocations ",
  "SsaBuilder ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
};
84
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000085template <bool kCount>
86ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
87 : num_allocations_(0u) {
88 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
89}
90
91template <bool kCount>
92void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
93 num_allocations_ = other.num_allocations_;
94 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
95}
96
97template <bool kCount>
98void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
99 alloc_stats_[kind] += bytes;
100 ++num_allocations_;
101}
102
// Returns the total number of allocations recorded so far.
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
107
108template <bool kCount>
109size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
110 const size_t init = 0u; // Initial value of the correct type.
111 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
112}
113
// Writes a human-readable memory report to |os|: aggregate usage over the
// arena chain starting at |first|, then the per-kind byte breakdown.
// |lost_bytes_adjustment| corrects for the caller's current arena, whose
// bytes_allocated_ may not have been flushed yet (see GetMemStats()).
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  // Guard: avoids division by zero in the average-size computation below.
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
142
// Explicitly instantiate the used implementation. The template argument is a
// compile-time flag selecting whether allocation counting is enabled.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
145
// Base arena: starts empty and unlinked; subclasses supply the backing memory
// (memory_/size_).
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
148
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700149MallocArena::MallocArena(size_t size) {
150 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
151 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700152}
153
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700154MallocArena::~MallocArena() {
155 free(reinterpret_cast<void*>(memory_));
156}
157
// Arena backed by an anonymous memory mapping. |low_4gb| is forwarded to
// MemMap::MapAnonymous to request an address below 4GiB (only supported by
// the map-based pool; see the CHECK in ArenaPool's constructor).
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  // Mapping failure is fatal; surface the MemMap error message.
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  // Use the map's actual size, which may exceed the request (e.g. rounding).
  size_ = map_->Size();
}
166
// Out-of-line destructor so MemMap's full definition is only needed here.
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}
170
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700171void MemMapArena::Release() {
172 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700173 map_->MadviseDontNeedAndZero();
174 bytes_allocated_ = 0;
175 }
176}
177
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700178void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700179 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700180 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700181 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700182 }
buzbee862a7602013-04-05 10:58:54 -0700183}
184
// Pool of reusable arenas. |use_malloc| selects MallocArena over MemMapArena;
// |low_4gb| restricts map-based arenas to the low 4GiB address space and is
// incompatible with the malloc implementation.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // Ensure the MemMap subsystem is initialized before any arena is mapped.
    MemMap::Init();
  }
}
195
196ArenaPool::~ArenaPool() {
197 while (free_arenas_ != nullptr) {
198 auto* arena = free_arenas_;
199 free_arenas_ = free_arenas_->next_;
200 delete arena;
201 }
202}
203
204Arena* ArenaPool::AllocArena(size_t size) {
205 Thread* self = Thread::Current();
206 Arena* ret = nullptr;
207 {
208 MutexLock lock(self, lock_);
209 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
210 ret = free_arenas_;
211 free_arenas_ = free_arenas_->next_;
212 }
213 }
214 if (ret == nullptr) {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700215 ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
216 new MemMapArena(size, low_4gb_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700217 }
218 ret->Reset();
219 return ret;
220}
221
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700222void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700223 if (!use_malloc_) {
224 // Doesn't work for malloc.
225 MutexLock lock(Thread::Current(), lock_);
226 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
227 arena->Release();
228 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700229 }
230}
231
Mathieu Chartier49285c52014-12-02 15:43:48 -0800232size_t ArenaPool::GetBytesAllocated() const {
233 size_t total = 0;
234 MutexLock lock(Thread::Current(), lock_);
235 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
236 total += arena->GetBytesAllocated();
237 }
238 return total;
239}
240
// Returns an entire chain of arenas (linked via next_) to the free list in a
// single splice, so the lock is taken only once.
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Mark the returned memory undefined for the memory tool so stale reads
    // through dangling pointers into these arenas get reported.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Walk to the tail of the incoming chain, then splice the whole chain
    // onto the front of the free list under the lock.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
258
// Forwards to the statistics base class: total bytes recorded by RecordAlloc.
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
262
Mathieu Chartierc7853442015-03-27 14:35:38 -0700263size_t ArenaAllocator::BytesUsed() const {
264 size_t total = ptr_ - begin_;
265 if (arena_head_ != nullptr) {
266 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
267 cur_arena = cur_arena->next_) {
268 total += cur_arena->GetBytesAllocated();
269 }
270 }
271 return total;
272}
273
// Bump-pointer allocator drawing arenas from |pool|. Starts with no arena;
// the first allocation obtains one. Caches whether a memory tool is active
// to pick the instrumented allocation path.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
282
// Flushes the bump-pointer position into the head arena's bytes_allocated_.
void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}
290
// Allocation path used when running under a memory tool: pads each request
// with a red zone, verifies the handed-out memory is zeroed, and poisons the
// unused tail so the tool reports overruns past the requested |bytes|.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Red zone plus rounding to 8-byte alignment.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Poison the red zone and rounding slack at the end of the allocation.
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
310
// Returns all arenas held by this allocator to the pool in one splice.
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // Flush the head arena's usage first so the pool zeroes the right amount.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
316
317void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
318 UpdateBytesAllocated();
319 Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
320 new_arena->next_ = arena_head_;
321 arena_head_ = new_arena;
322 // Update our internal data structures.
323 ptr_ = begin_ = new_arena->Begin();
324 end_ = new_arena->End();
325}
326
Mathieu Chartiere401d142015-04-22 13:56:20 -0700327bool ArenaAllocator::Contains(const void* ptr) const {
328 if (ptr >= begin_ && ptr < end_) {
329 return true;
330 }
331 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
332 if (cur_arena->Contains(ptr)) {
333 return true;
334 }
335 }
336 return false;
337}
338
// Bundles everything needed to print an allocator's statistics later:
// a label, the stats object, the arena chain, and the adjustment for the
// not-yet-flushed current arena (see ArenaAllocatorStatsImpl::Dump).
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
346
// Prints the captured statistics under the stored label.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
351
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ may be stale (it is only flushed by
  // UpdateBytesAllocated); compute the correction from the live bump pointer.
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
358
359} // namespace art