/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler_internals.h"
#include "dex_file-inl.h"
#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>
#include <sys/mman.h>  // For madvise()/MADV_DONTNEED and PROT_READ/PROT_WRITE.

namespace art {

// MemMap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

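// Human-readable labels for each allocation kind, used by DumpMemStats(). The order must
// match the ArenaAllocKind enum in the header.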
static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
};

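// Allocate the arena's backing storage: an anonymous mmap when kUseMemMap is set,
// otherwise calloc(). Both paths hand back zeroed memory.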
Arena::Arena(size_t size)
    : bytes_allocated_(0),
      map_(nullptr),
      next_(nullptr) {
  if (kUseMemMap) {
    std::string error_msg;
    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
                                &error_msg);
    CHECK(map_ != nullptr) << error_msg;
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

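// Release the backing storage through the deallocator matching how it was obtained.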
Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

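// Scrub the used portion of the arena so it is zeroed again for the next user: either
// memset() it directly, or madvise(MADV_DONTNEED) so the kernel supplies fresh zero-filled
// pages on the next touch. Allocation paths rely on arena memory being pre-zeroed.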
void Arena::Reset() {
  if (bytes_allocated_) {
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      madvise(Begin(), bytes_allocated_, MADV_DONTNEED);
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}

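// Delete any arenas still parked in the free list.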
ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

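// Reuse the arena at the head of the free list when it is big enough; otherwise allocate
// a fresh one. Only the head is inspected, so the check stays O(1). The arena is Reset()
// to a zeroed state before being handed out.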
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

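// Return an arena to the free list for reuse. Under Valgrind, the recycled range is marked
// undefined so that stale reads from a later compilation get reported.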
void ArenaPool::FreeArena(Arena* arena) {
  Thread* self = Thread::Current();
  if (UNLIKELY(RUNNING_ON_VALGRIND)) {
    VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
  }
  {
    MutexLock lock(self, lock_);
    arena->next_ = free_arenas_;
    free_arenas_ = arena;
  }
}

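// Total bytes handed out to clients, summed across all allocation kinds.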
size_t ArenaAllocator::BytesAllocated() const {
  size_t total = 0;
  for (int i = 0; i < kNumAllocKinds; i++) {
    total += alloc_stats_[i];
  }
  return total;
}

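// A new ArenaAllocator owns no arena yet; the first allocation pulls one from the pool,
// and the destructor returns every arena in bulk. A rough usage sketch (Alloc() and the
// exact kind constants live in the header; kAllocMisc here is illustrative):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* buf = allocator.Alloc(64, ArenaAllocator::kAllocMisc);
//     // ... use buf; individual frees are neither needed nor possible ...
//   }  // ~ArenaAllocator() hands all arenas back to the pool.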
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      num_allocations_(0),
      running_on_valgrind_(RUNNING_ON_VALGRIND) {
  memset(&alloc_stats_[0], 0, sizeof(alloc_stats_));
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

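// Valgrind-aware allocation path: each request is padded with a kValgrindRedZoneBytes
// red zone that is marked no-access, so a buffer overrun past the requested size triggers
// a Valgrind error instead of silently corrupting the neighbouring allocation.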
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  if (kCountAllocations) {
    alloc_stats_[kind] += rounded_bytes;
    ++num_allocations_;
  }
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  while (arena_head_ != nullptr) {
    Arena* arena = arena_head_;
    arena_head_ = arena_head_->next_;
    pool_->FreeArena(arena);
  }
}

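// Install a new head arena big enough for allocation_size and retarget the bump-pointer
// cursor (begin_/ptr_/end_) at it. Whatever space remained in the previous head is
// abandoned; DumpMemStats() reports it as "lost".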
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

// Dump memory usage stats.
void ArenaAllocator::DumpMemStats(std::ostream& os) const {
  size_t malloc_bytes = 0;
  // Start out with how many lost bytes we have in the arena we are currently allocating into.
  size_t lost_bytes(end_ - ptr_);
  size_t num_arenas = 0;
  for (Arena* arena = arena_head_; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    if (arena != arena_head_) {
      lost_bytes += arena->RemainingSpace();
    }
    ++num_arenas;
  }
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  if (num_allocations_ != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations_ << ", avg size: " << bytes_allocated / num_allocations_ << "\n";
  }
  os << "===== Allocation by kind\n";
  for (int i = 0; i < kNumAllocKinds; i++) {
    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

}  // namespace art