/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <numeric>

#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {
27
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070028// Memmap is a bit slower than malloc according to my measurements.
29static constexpr bool kUseMemMap = false;
30static constexpr bool kUseMemSet = true && kUseMemMap;
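// Each Valgrind allocation is padded with kValgrindRedZoneBytes of trailing
// space that AllocValgrind() marks NOACCESS, so reads or writes just past the
// end of an allocation are reported.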
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "LIR masks  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  COMPILE_ASSERT(arraysize(kAllocNames) == kNumArenaAllocKinds, check_arraysize_kAllocNames);
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
113
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700114Arena::Arena(size_t size)
115 : bytes_allocated_(0),
116 map_(nullptr),
117 next_(nullptr) {
118 if (kUseMemMap) {
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700119 std::string error_msg;
Ian Rogersef7d42f2014-01-06 12:55:46 -0800120 map_ = MemMap::MapAnonymous("dalvik-arena", NULL, size, PROT_READ | PROT_WRITE, false,
121 &error_msg);
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700122 CHECK(map_ != nullptr) << error_msg;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700123 memory_ = map_->Begin();
124 size_ = map_->Size();
125 } else {
126 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
127 size_ = size;
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700128 }
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700129}
130
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700131Arena::~Arena() {
132 if (kUseMemMap) {
133 delete map_;
134 } else {
135 free(reinterpret_cast<void*>(memory_));
136 }
buzbee862a7602013-04-05 10:58:54 -0700137}
138
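// Zero out any memory handed out from this arena before it is reused. Callers
// rely on arena memory being zero-initialized (AllocValgrind() CHECK-fails if
// it is not).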
void Arena::Reset() {
  if (bytes_allocated_) {
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      map_->MadviseDontNeedAndZero();
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
  : lock_("Arena pool lock"),
    free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

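// Reuse a free arena if one is large enough for the request; otherwise
// allocate a fresh one. The Reset() call ensures the caller sees zeroed
// memory in both cases.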
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

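// Return a whole chain of arenas to the pool's free list. Under Valgrind, the
// used portion of each arena is first marked undefined so that later reads of
// recycled memory are reported.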
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

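// Pad each request with a trailing red zone and round up to 8-byte alignment:
// e.g. a 13-byte request becomes RoundUp(13 + 8, 8) = 24 bytes, of which bytes
// [13, 24) are marked NOACCESS below.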
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art
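
// A minimal usage sketch (illustration only; ArenaAllocator::Alloc() and the
// ArenaAllocKind values are declared in arena_allocator.h):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* buf = allocator.Alloc(256, kArenaAllocMisc);  // Zeroed memory.
//     // Individual allocations are never freed; the whole arena chain goes
//     // back to the pool when the allocator is destroyed.
//   }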