/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>  // For std::setw in ArenaAllocatorStatsImpl::Dump().
#include <numeric>

#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
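// Bytes of padding reserved after each allocation when running under Valgrind;
// AllocValgrind() marks this red zone inaccessible so that overruns past the
// requested size are reported.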
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

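// Printable names for the tracked allocation kinds. The order must match the
// ArenaAllocKind enum (see arena_allocator.h); the COMPILE_ASSERT in Dump()
// verifies that the counts agree.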
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "BBList     ",
  "BBPreds    ",
  "DfsPreOrd  ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd    ",
  "Lowering   ",
  "LIR        ",
  "LIR masks  ",
  "SwitchTbl  ",
  "FillArray  ",
  "SlowPaths  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  COMPILE_ASSERT(arraysize(kAllocNames) == kNumArenaAllocKinds, check_arraysize_kAllocNames);
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

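// Back each arena either with an anonymous MemMap or with zero-initialized malloc'ed
// memory, depending on kUseMemMap.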
Arena::Arena(size_t size)
  : bytes_allocated_(0),
    map_(nullptr),
    next_(nullptr) {
  if (kUseMemMap) {
    std::string error_msg;
    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
                                &error_msg);
    CHECK(map_ != nullptr) << error_msg;
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

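// Return the arena to a fully zeroed state, either by memset or by madvising the pages
// away, so that callers can rely on allocations being zero-initialized.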
void Arena::Reset() {
  if (bytes_allocated_) {
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      map_->MadviseDontNeedAndZero();
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
  : lock_("Arena pool lock"),
    free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

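// Hand out the arena at the head of the free list if it is large enough; otherwise
// allocate a fresh one. The returned arena is always Reset(), i.e. zeroed.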
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

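// Return a whole chain of arenas to the pool's free list. Under Valgrind, the allocated
// bytes are first marked undefined so that stale reads from recycled arenas get reported.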
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

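// A minimal usage sketch, assuming the interface declared in arena_allocator.h
// (Alloc() is assumed to default its kind argument to kArenaAllocMisc):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* buf = allocator.Alloc(256, kArenaAllocMisc);  // Zero-initialized memory.
//     // ... use buf; no explicit free. The destructor returns the arenas to the pool.
//   }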
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

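// Allocation slow path used when running under Valgrind: the request is padded with
// kValgrindRedZoneBytes, and the padding is marked inaccessible so that reads or writes
// past the requested size are flagged.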
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

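// Fetch an arena large enough for allocation_size from the pool (at least kDefaultSize),
// push it onto the head of the arena list, and make it the current allocation buffer.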
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art