blob: 365b094e9539b15dc82cfab504bd424a5b6b4f98 [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000017#include <algorithm>
18#include <numeric>
19
buzbee862a7602013-04-05 10:58:54 -070020#include "arena_allocator.h"
21#include "base/logging.h"
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022#include "base/mutex.h"
Ian Rogers02ed4c02013-09-06 13:10:04 -070023#include "thread-inl.h"
Mathieu Chartier75165d02013-09-12 14:00:31 -070024#include <memcheck/memcheck.h>
buzbee862a7602013-04-05 10:58:54 -070025
26namespace art {
27
// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
// Only meaningful when kUseMemMap is true: controls memset vs. madvise in Arena::Reset().
static constexpr bool kUseMemSet = true && kUseMemMap;
// Bytes of padding appended to each allocation when running under Valgrind; the
// padding is marked NOACCESS so buffer overruns are reported (see AllocValgrind).
static constexpr size_t kValgrindRedZoneBytes = 8;
// Out-of-line definition of the in-class constexpr so it can be odr-used.
constexpr size_t Arena::kDefaultSize;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070033
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000034static const char* alloc_names[kNumArenaAllocKinds] = {
buzbee862a7602013-04-05 10:58:54 -070035 "Misc ",
36 "BasicBlock ",
37 "LIR ",
38 "MIR ",
39 "DataFlow ",
40 "GrowList ",
41 "GrowBitMap ",
42 "Dalvik2SSA ",
43 "DebugInfo ",
44 "Successor ",
45 "RegAlloc ",
46 "Data ",
47 "Preds ",
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000048 "STL ",
buzbee862a7602013-04-05 10:58:54 -070049};
50
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000051template <bool kCount>
52ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
53 : num_allocations_(0u) {
54 std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
55}
56
57template <bool kCount>
58void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
59 num_allocations_ = other.num_allocations_;
60 std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
61}
62
63template <bool kCount>
64void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
65 alloc_stats_[kind] += bytes;
66 ++num_allocations_;
67}
68
// Returns the total number of allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
73
74template <bool kCount>
75size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
76 const size_t init = 0u; // Initial value of the correct type.
77 return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
78}
79
// Prints a usage summary for the arena chain starting at |first|: bytes used
// vs. bytes allocated vs. bytes lost, allocation counts, and a per-kind
// breakdown of recorded allocations.
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;  // Total backing storage of all arenas.
  size_t lost_bytes = 0u;    // Allocated-but-unused tail space of all arenas.
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = ArenaAllocatorStats::NumAllocations();
  if (num_allocations != 0) {  // Avoid division by zero in the average below.
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  // alloc_names and alloc_stats_ are parallel arrays indexed by ArenaAllocKind.
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
107
// Explicitly instantiate the used implementation so the template definitions
// above need not live in the header.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
110
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700111Arena::Arena(size_t size)
112 : bytes_allocated_(0),
113 map_(nullptr),
114 next_(nullptr) {
115 if (kUseMemMap) {
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700116 std::string error_msg;
Ian Rogersef7d42f2014-01-06 12:55:46 -0800117 map_ = MemMap::MapAnonymous("dalvik-arena", NULL, size, PROT_READ | PROT_WRITE, false,
118 &error_msg);
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700119 CHECK(map_ != nullptr) << error_msg;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700120 memory_ = map_->Begin();
121 size_ = map_->Size();
122 } else {
123 memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
124 size_ = size;
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700125 }
Ian Rogerse7a5b7d2013-04-18 20:09:02 -0700126}
127
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700128Arena::~Arena() {
129 if (kUseMemMap) {
130 delete map_;
131 } else {
132 free(reinterpret_cast<void*>(memory_));
133 }
buzbee862a7602013-04-05 10:58:54 -0700134}
135
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700136void Arena::Reset() {
137 if (bytes_allocated_) {
138 if (kUseMemSet || !kUseMemMap) {
139 memset(Begin(), 0, bytes_allocated_);
buzbeea5abf702013-04-12 14:39:29 -0700140 } else {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700141 madvise(Begin(), bytes_allocated_, MADV_DONTNEED);
buzbeea5abf702013-04-12 14:39:29 -0700142 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700143 bytes_allocated_ = 0;
buzbee862a7602013-04-05 10:58:54 -0700144 }
buzbee862a7602013-04-05 10:58:54 -0700145}
146
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700147ArenaPool::ArenaPool()
148 : lock_("Arena pool lock"),
149 free_arenas_(nullptr) {
150}
151
152ArenaPool::~ArenaPool() {
153 while (free_arenas_ != nullptr) {
154 auto* arena = free_arenas_;
155 free_arenas_ = free_arenas_->next_;
156 delete arena;
157 }
158}
159
160Arena* ArenaPool::AllocArena(size_t size) {
161 Thread* self = Thread::Current();
162 Arena* ret = nullptr;
163 {
164 MutexLock lock(self, lock_);
165 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
166 ret = free_arenas_;
167 free_arenas_ = free_arenas_->next_;
168 }
169 }
170 if (ret == nullptr) {
171 ret = new Arena(size);
172 }
173 ret->Reset();
174 return ret;
175}
176
// Returns a whole chain of arenas (linked through next_) to the pool's free
// list in one lock acquisition. Accepts nullptr (no-op).
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    // Mark the used portion of each arena undefined so Valgrind reports any
    // later read of recycled arena memory before it is re-initialized.
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    // Walk to the tail of the incoming chain, then splice the whole chain
    // onto the front of the free list under the pool lock.
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
194
// Total bytes handed out by this allocator, as tracked by the stats base class.
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
198
199ArenaAllocator::ArenaAllocator(ArenaPool* pool)
200 : pool_(pool),
201 begin_(nullptr),
202 end_(nullptr),
203 ptr_(nullptr),
204 arena_head_(nullptr),
Mathieu Chartier661974a2014-01-09 11:23:53 -0800205 running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700206}
207
208void ArenaAllocator::UpdateBytesAllocated() {
209 if (arena_head_ != nullptr) {
210 // Update how many bytes we have allocated into the arena so that the arena pool knows how
211 // much memory to zero out.
212 arena_head_->bytes_allocated_ = ptr_ - begin_;
213 }
214}
215
// Allocation path used when running under Valgrind: each allocation gets a
// red zone appended and marked inaccessible so overruns are reported.
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  // Append the red zone and round the total up to a 4-byte boundary
  // (kValgrindRedZoneBytes is a multiple of 4, so alignment is preserved).
  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  // Poison everything past the requested size (padding + red zone) so any
  // touch of it is flagged by Valgrind.
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}
235
// Returns all arenas held by this allocator to the pool for reuse.
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // The head arena's bytes_allocated_ is only synced lazily; do it now so the
  // pool knows how much of the arena to scrub on reuse.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
241
242void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
243 UpdateBytesAllocated();
244 Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
245 new_arena->next_ = arena_head_;
246 arena_head_ = new_arena;
247 // Update our internal data structures.
248 ptr_ = begin_ = new_arena->Begin();
249 end_ = new_arena->End();
250}
251
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000252MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
253 ssize_t lost_bytes_adjustment)
254 : name_(name),
255 stats_(stats),
256 first_arena_(first_arena),
257 lost_bytes_adjustment_(lost_bytes_adjustment) {
258}
259
// Writes the captured statistics to |os|, prefixed with the label given at
// construction.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
264
// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  // The head arena's bytes_allocated_ is only synced in UpdateBytesAllocated(),
  // so derive a correction from the live bump-pointer state: (end_ - ptr_) is
  // the true remaining space, RemainingSpace() reflects the stale counter.
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
271
272} // namespace art