blob: ca4635d35291be826a8d5d05016cb9fe71b97429 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
18#include <numeric>
19
buzbee862a7602013-04-05 10:58:54 -070020#include "arena_allocator.h"
21#include "base/logging.h"
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022#include "base/mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070023#include "thread-inl.h"
Mathieu Chartier75165d02013-09-12 14:00:31 -070024#include <memcheck/memcheck.h>
buzbee862a7602013-04-05 10:58:54 -070025
26namespace art {
27
// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
// Zero recycled arenas with memset; only meaningful when memmap-backed
// (the malloc path always memsets in Arena::Reset()).
static constexpr bool kUseMemSet = true && kUseMemMap;
// Bytes of padding appended to each allocation when running under Valgrind,
// marked NOACCESS so buffer overruns are reported.
static constexpr size_t kValgrindRedZoneBytes = 8;
// Out-of-line definition for the in-class-initialized static constant.
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070033
// Human-readable labels used by Dump(), one per allocation kind.
// NOTE(review): entries are indexed by ArenaAllocKind, so the order here must
// match the enum declaration — verify against the header when adding kinds.
template <bool kCount>
const char* ArenaAllocatorStatsImpl<kCount>::kAllocNames[kNumArenaAllocKinds] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};
51
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000052template <bool kCount>
53ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
54 : num_allocations_(0u) {
55 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
56}
57
58template <bool kCount>
59void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
60 num_allocations_ = other.num_allocations_;
61 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
62}
63
64template <bool kCount>
65void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
66 alloc_stats_[kind] += bytes;
67 ++num_allocations_;
68}
69
// Returns the total number of allocations recorded so far.
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
74
75template <bool kCount>
76size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
77 const size_t init = 0u; // Initial value of the correct type.
78 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
79}
80
// Dumps a human-readable usage summary: overall used/allocated/lost bytes,
// arena and allocation counts, and a per-kind byte breakdown.
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  // Walk the arena chain to total the backing storage and the unused tails.
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {  // Guards the average below against division by zero.
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
108
// Explicitly instantiate the used implementation so its member definitions in
// this translation unit are emitted for out-of-line users.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
111
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700112Arena::Arena(size_t size)
113 : bytes_allocated_(0),
114 map_(nullptr),
115 next_(nullptr) {
116 if (kUseMemMap) {
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700117 std::string error_msg;
Ian Rogersef7d42f2014-01-06 12:55:46 -0800118 map_ = MemMap::MapAnonymous("dalvik-arena", NULL, size, PROT_READ | PROT_WRITE, false,
119 &error_msg);
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700120 CHECK(map_ != nullptr) << error_msg;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700121 memory_ = map_->Begin();
122 size_ = map_->Size();
123 } else {
124 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
125 size_ = size;
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700126 }
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700127}
128
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700129Arena::~Arena() {
130 if (kUseMemMap) {
131 delete map_;
132 } else {
133 free(reinterpret_cast<void*>(memory_));
134 }
buzbee862a7602013-04-05 10:58:54 -0700135}
136
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700137void Arena::Reset() {
138 if (bytes_allocated_) {
139 if (kUseMemSet || !kUseMemMap) {
140 memset(Begin(), 0, bytes_allocated_);
buzbeea5abf702013-04-12 14:39:29 -0700141 } else {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700142 madvise(Begin(), bytes_allocated_, MADV_DONTNEED);
buzbeea5abf702013-04-12 14:39:29 -0700143 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700144 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700145 }
buzbee862a7602013-04-05 10:58:54 -0700146}
147
// Creates an empty pool; arenas are allocated lazily by AllocArena() and
// recycled onto the free list by FreeArenaChain().
ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}
152
153ArenaPool::~ArenaPool() {
154 while (free_arenas_ != nullptr) {
155 auto* arena = free_arenas_;
156 free_arenas_ = free_arenas_->next_;
157 delete arena;
158 }
159}
160
161Arena* ArenaPool::AllocArena(size_t size) {
162 Thread* self = Thread::Current();
163 Arena* ret = nullptr;
164 {
165 MutexLock lock(self, lock_);
166 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
167 ret = free_arenas_;
168 free_arenas_ = free_arenas_->next_;
169 }
170 }
171 if (ret == nullptr) {
172 ret = new Arena(size);
173 }
174 ret->Reset();
175 return ret;
176}
177
// Returns a whole chain of arenas (linked via next_) to the pool's free list.
void ArenaPool::FreeArenaChain(Arena* first) {
  // Under Valgrind, mark the returned memory as undefined so any later read of
  // recycled memory through a stale pointer is reported. Done before taking
  // the lock since it only touches the chain being returned.
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Find the tail so the whole chain can be spliced onto the free list in
    // one O(1) link update under the lock.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
195
// Total bytes recorded by the (possibly compiled-out) allocation statistics.
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
199
// Creates an allocator with no arena attached yet; the first allocation pulls
// an arena from `pool`. Whether Valgrind is active is cached up front so the
// hot allocation path avoids re-querying it.
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
  : pool_(pool),
    begin_(nullptr),
    end_(nullptr),
    ptr_(nullptr),
    arena_head_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}
208
209void ArenaAllocator::UpdateBytesAllocated() {
210 if (arena_head_ != nullptr) {
211 // Update how many bytes we have allocated into the arena so that the arena pool knows how
212 // much memory to zero out.
213 arena_head_->bytes_allocated_ = ptr_ - begin_;
214 }
215}
216
// Allocation path used when running under Valgrind: pads each allocation with
// a red zone and tells Valgrind the padding is inaccessible so overruns are
// caught. Returns nullptr only if a new arena could not be obtained.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Add the red zone, then round the total up to a 4-byte boundary.
  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Make the red-zone tail (including alignment slack) trap on access.
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
236
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // Record the head arena's fill level first so the pool zeroes the right range.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
242
// Fetches an arena large enough for `allocation_size` (at least kDefaultSize)
// from the pool and makes it the head of this allocator's arena chain.
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  // Record how much of the old head arena was used before switching away.
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}
252
// Captures the inputs needed to dump allocator statistics later. Stores
// pointers only — the stats and arena chain must outlive this MemStats.
MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
260
// Writes the named statistics summary to the given stream.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
265
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ may be stale (only flushed by
  // UpdateBytesAllocated()); compute how far it differs from the live
  // ptr_/end_ cursors and pass that as an adjustment.
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
272
273} // namespace art