blob: 345428c2a664e4248916409f440f1dc31d31b5f1 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
Ian Rogers6f3dbba2014-10-14 17:41:57 -070018#include <iomanip>
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019#include <numeric>
20
buzbee862a7602013-04-05 10:58:54 -070021#include "arena_allocator.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080022#include "logging.h"
Vladimir Marko3481ba22015-04-13 12:22:36 +010023#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080024#include "mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070025#include "thread-inl.h"
Evgenii Stepanov1e133742015-05-20 12:30:59 -070026#include "base/memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
Evgenii Stepanov1e133742015-05-20 12:30:59 -070030static constexpr size_t kMemoryToolRedZoneBytes = 8;
Mark Mendell45c11652013-12-11 12:27:35 -080031constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070032
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Human-readable labels for each ArenaAllocKind, used by Dump() to print
  // per-kind allocation totals. Order and count must match the
  // ArenaAllocKind enum (enforced by the static_assert in Dump()); the
  // trailing padding keeps Dump()'s byte columns aligned.
  "Misc ",
  "BBList ",
  "BBPreds ",
  "DfsPreOrd ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd ",
  "Lowering ",
  "LIR ",
  "LIR masks ",
  "SwitchTbl ",
  "FillArray ",
  "SlowPaths ",
  "MIR ",
  "DataFlow ",
  "GrowList ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo ",
  "RegAlloc ",
  "Data ",
  "STL ",
  "GraphBuilder ",
  "Graph ",
  "BasicBlock ",
  "BlockList ",
  "RevPostOrder ",
  "LinearOrder ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors ",
  "Dominated ",
  "Instruction ",
  "InvokeInputs ",
  "PhiInputs ",
  "LoopInfo ",
  "LIBackEdges ",
  "TryCatchInf ",
  "UseListNode ",
  "Environment ",
  "EnvVRegs ",
  "EnvLocations ",
  "LocSummary ",
  "SsaBuilder ",
  "MoveOperands ",
  "CodeBuffer ",
  "StackMaps ",
  "BaselineMaps ",
  "Optimization ",
  "GVN ",
  "InductionVar ",
  "BCE ",
  "SsaLiveness ",
  "SsaPhiElim ",
  "RefTypeProp ",
  "PrimTypeProp ",
  "SideEffects ",
  "RegAllocator ",
  "StackMapStm ",
  "CodeGen ",
  "ParallelMove ",
};
98
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000099template <bool kCount>
100ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
101 : num_allocations_(0u) {
102 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
103}
104
105template <bool kCount>
106void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
107 num_allocations_ = other.num_allocations_;
108 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
109}
110
111template <bool kCount>
112void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
113 alloc_stats_[kind] += bytes;
114 ++num_allocations_;
115}
116
// Returns the total number of allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
121
122template <bool kCount>
123size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
124 const size_t init = 0u; // Initial value of the correct type.
125 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
126}
127
// Prints an overall memory summary for the arena chain starting at 'first',
// followed by a per-allocation-kind breakdown of the recorded stats.
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;  // Total backing memory across all arenas.
  size_t lost_bytes = 0u;    // Backing memory never handed out to callers.
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    // kAllocNames entries are padded so the setw(10) byte counts align.
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
156
// Explicitly instantiate the used implementation, so the template member
// definitions above can live in this .cc file instead of the header.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
159
// An arena starts empty and unlinked; subclasses (MallocArena, MemMapArena)
// are responsible for setting up memory_ and size_.
Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
}
162
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700163MallocArena::MallocArena(size_t size) {
164 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
165 size_ = size;
buzbee862a7602013-04-05 10:58:54 -0700166}
167
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700168MallocArena::~MallocArena() {
169 free(reinterpret_cast<void*>(memory_));
170}
171
// Arena backed by an anonymous memory mapping, optionally restricted to the
// low 4GiB of address space. NOTE(review): Arena::Reset() assumes arena
// memory starts zeroed; anonymous mappings are conventionally zero-filled -
// confirm MemMap::MapAnonymous guarantees this.
MemMapArena::MemMapArena(size_t size, bool low_4gb) {
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  // The mapping may be larger than requested (e.g. page rounding), so take
  // the bounds from the map itself rather than from 'size'.
  memory_ = map_->Begin();
  size_ = map_->Size();
}
180
MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>, which unmaps the backing pages.
}
184
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700185void MemMapArena::Release() {
186 if (bytes_allocated_ > 0) {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700187 map_->MadviseDontNeedAndZero();
188 bytes_allocated_ = 0;
189 }
190}
191
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700192void Arena::Reset() {
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700193 if (bytes_allocated_ > 0) {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700194 memset(Begin(), 0, bytes_allocated_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700195 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700196 }
buzbee862a7602013-04-05 10:58:54 -0700197}
198
// Pool of reusable arenas. 'use_malloc' selects MallocArena over MemMapArena
// backing; 'low_4gb' requests mappings below 4GiB and is only supported with
// the mem-map implementation.
ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
      low_4gb_(low_4gb) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    // MemMap must be initialized before any MemMapArena is created.
    MemMap::Init();
  }
}
209
210ArenaPool::~ArenaPool() {
211 while (free_arenas_ != nullptr) {
212 auto* arena = free_arenas_;
213 free_arenas_ = free_arenas_->next_;
214 delete arena;
215 }
216}
217
218Arena* ArenaPool::AllocArena(size_t size) {
219 Thread* self = Thread::Current();
220 Arena* ret = nullptr;
221 {
222 MutexLock lock(self, lock_);
223 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
224 ret = free_arenas_;
225 free_arenas_ = free_arenas_->next_;
226 }
227 }
228 if (ret == nullptr) {
Mathieu Chartierc7853442015-03-27 14:35:38 -0700229 ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
230 new MemMapArena(size, low_4gb_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700231 }
232 ret->Reset();
233 return ret;
234}
235
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700236void ArenaPool::TrimMaps() {
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700237 if (!use_malloc_) {
238 // Doesn't work for malloc.
239 MutexLock lock(Thread::Current(), lock_);
240 for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
241 arena->Release();
242 }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700243 }
244}
245
Mathieu Chartier49285c52014-12-02 15:43:48 -0800246size_t ArenaPool::GetBytesAllocated() const {
247 size_t total = 0;
248 MutexLock lock(Thread::Current(), lock_);
249 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
250 total += arena->GetBytesAllocated();
251 }
252 return total;
253}
254
// Returns a whole chain of arenas to the pool's free list in one splice.
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    // Under a memory tool (Valgrind/ASan), mark each arena's used bytes as
    // undefined so the recycled memory can be handed out again without the
    // tool flagging accesses to it.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Find the tail of the chain, then splice the entire chain onto the
    // front of the free list while holding the lock only for the splice.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
272
// Sum of all allocation sizes recorded by the stats mixin (not the backing
// memory obtained from the system).
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
276
Mathieu Chartierc7853442015-03-27 14:35:38 -0700277size_t ArenaAllocator::BytesUsed() const {
278 size_t total = ptr_ - begin_;
279 if (arena_head_ != nullptr) {
280 for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
281 cur_arena = cur_arena->next_) {
282 total += cur_arena->GetBytesAllocated();
283 }
284 }
285 return total;
286}
287
// Starts with no arena; begin_/end_/ptr_ stay null until the first
// allocation obtains one from the pool.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
}
296
297void ArenaAllocator::UpdateBytesAllocated() {
298 if (arena_head_ != nullptr) {
299 // Update how many bytes we have allocated into the arena so that the arena pool knows how
300 // much memory to zero out.
301 arena_head_->bytes_allocated_ = ptr_ - begin_;
302 }
303}
304
// Allocation path used when running under a memory tool: appends a poisoned
// red zone after each allocation so out-of-bounds accesses are reported.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Reserve room for the red zone and keep the bump pointer 8-byte aligned.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Poison the red zone (and alignment padding) beyond the caller's 'bytes'
  // so the memory tool flags any access past the requested size.
  MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
324
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  // First make sure the head arena's usage is recorded, so the pool zeroes
  // the right number of bytes on reuse.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
330
// Pushes a fresh arena (large enough for 'allocation_size') onto the head of
// the chain and points the bump allocator at it.
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  // Record the outgoing head arena's usage before it is displaced.
  UpdateBytesAllocated();
  // Never allocate below kDefaultSize so small requests share one block.
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}
340
Mathieu Chartiere401d142015-04-22 13:56:20 -0700341bool ArenaAllocator::Contains(const void* ptr) const {
342 if (ptr >= begin_ && ptr < end_) {
343 return true;
344 }
345 for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
346 if (cur_arena->Contains(ptr)) {
347 return true;
348 }
349 }
350 return false;
351}
352
// Snapshot wrapper pairing an allocator's stats with its arena chain; holds
// non-owning pointers, so it must not outlive 'stats'/'first_arena'.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
360
// Prints a titled stats report by delegating to ArenaAllocatorStats::Dump().
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
365
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700366// Dump memory usage stats.
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000367MemStats ArenaAllocator::GetMemStats() const {
368 ssize_t lost_bytes_adjustment =
369 (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
370 return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
buzbee862a7602013-04-05 10:58:54 -0700371}
372
373} // namespace art