blob: 224b33e2604045940f8dabe0f3dfbb270ecef7d1 [file] [log] [blame]
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080017#include "space_bitmap-inl.h"
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070018
19namespace art {
Ian Rogers1d54e732013-05-02 21:10:01 -070020namespace gc {
21namespace accounting {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070022
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070023template<size_t kAlignment>
Mathieu Chartier73d1e172014-04-11 17:53:48 -070024size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {
25 const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerWord;
26 return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) * kWordSize;
27}
28
29template<size_t kAlignment>
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070030SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
31 const std::string& name, MemMap* mem_map, byte* heap_begin, size_t heap_capacity) {
Mathieu Chartier31e89252013-08-28 11:29:12 -070032 CHECK(mem_map != nullptr);
Andreas Gampecb8aea42014-04-02 15:39:58 -070033 uword* bitmap_begin = reinterpret_cast<uword*>(mem_map->Begin());
Mathieu Chartier73d1e172014-04-11 17:53:48 -070034 const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
Mathieu Chartier31e89252013-08-28 11:29:12 -070035 return new SpaceBitmap(name, mem_map, bitmap_begin, bitmap_size, heap_begin);
36}
37
// Constructs a bitmap over [heap_begin, heap_begin + covered capacity).
// Takes ownership of mem_map (stored in mem_map_); bitmap_begin must point
// into that mapping and bitmap_size is the usable byte length of the bitmap
// (as produced by ComputeBitmapSize).
template<size_t kAlignment>
SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin,
                                     size_t bitmap_size, const void* heap_begin)
    : mem_map_(mem_map), bitmap_begin_(bitmap_begin), bitmap_size_(bitmap_size),
      heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
      name_(name) {
  // A bitmap with no storage or zero size would make every Test/Set invalid.
  CHECK(bitmap_begin_ != nullptr);
  CHECK_NE(bitmap_size, 0U);
}
47
48template<size_t kAlignment>
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070049SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
50 const std::string& name, byte* heap_begin, size_t heap_capacity) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070051 // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
Mathieu Chartier73d1e172014-04-11 17:53:48 -070052 const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
Ian Rogers8d31bbd2013-10-13 10:44:14 -070053 std::string error_msg;
Ian Rogers700a4022014-05-19 16:49:03 -070054 std::unique_ptr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), nullptr, bitmap_size,
Ian Rogersef7d42f2014-01-06 12:55:46 -080055 PROT_READ | PROT_WRITE, false, &error_msg));
Ian Rogers8d31bbd2013-10-13 10:44:14 -070056 if (UNLIKELY(mem_map.get() == nullptr)) {
57 LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
Mathieu Chartierbbd695c2014-04-16 09:48:48 -070058 return nullptr;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070059 }
Mathieu Chartier31e89252013-08-28 11:29:12 -070060 return CreateFromMemMap(name, mem_map.release(), heap_begin, heap_capacity);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070061}
62
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070063template<size_t kAlignment>
64void SpaceBitmap<kAlignment>::SetHeapLimit(uintptr_t new_end) {
Mathieu Chartierdcf8d722012-08-02 14:55:54 -070065 DCHECK(IsAligned<kBitsPerWord * kAlignment>(new_end));
66 size_t new_size = OffsetToIndex(new_end - heap_begin_) * kWordSize;
Mathieu Chartiercc236d72012-07-20 10:29:05 -070067 if (new_size < bitmap_size_) {
68 bitmap_size_ = new_size;
69 }
70 // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
71 // should be marked.
Mathieu Chartiercc236d72012-07-20 10:29:05 -070072}
73
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070074template<size_t kAlignment>
Ian Rogers576ca0c2014-06-06 15:58:22 -070075std::string SpaceBitmap<kAlignment>::Dump() const {
76 return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
77 reinterpret_cast<void*>(HeapLimit()));
78}
79
80template<size_t kAlignment>
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070081void SpaceBitmap<kAlignment>::Clear() {
Ian Rogersc5f17732014-06-05 20:48:42 -070082 if (bitmap_begin_ != nullptr) {
83 mem_map_->MadviseDontNeedAndZero();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070084 }
85}
86
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070087template<size_t kAlignment>
Mathieu Chartierbbd695c2014-04-16 09:48:48 -070088void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
Mathieu Chartier357e9be2012-08-01 11:00:14 -070089 DCHECK_EQ(Size(), source_bitmap->Size());
90 std::copy(source_bitmap->Begin(), source_bitmap->Begin() + source_bitmap->Size() / kWordSize, Begin());
91}
92
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070093template<size_t kAlignment>
Mathieu Chartierbbd695c2014-04-16 09:48:48 -070094void SpaceBitmap<kAlignment>::Walk(ObjectCallback* callback, void* arg) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070095 CHECK(bitmap_begin_ != NULL);
96 CHECK(callback != NULL);
Mathieu Chartier357e9be2012-08-01 11:00:14 -070097
98 uintptr_t end = OffsetToIndex(HeapLimit() - heap_begin_ - 1);
Andreas Gampecb8aea42014-04-02 15:39:58 -070099 uword* bitmap_begin = bitmap_begin_;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700100 for (uintptr_t i = 0; i <= end; ++i) {
Andreas Gampecb8aea42014-04-02 15:39:58 -0700101 uword w = bitmap_begin[i];
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700102 if (w != 0) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700103 uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700104 do {
Andreas Gampecb8aea42014-04-02 15:39:58 -0700105 const size_t shift = CTZ(w);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800106 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700107 (*callback)(obj, arg);
Andreas Gampecb8aea42014-04-02 15:39:58 -0700108 w ^= (static_cast<uword>(1)) << shift;
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700109 } while (w != 0);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700110 }
111 }
112}
113
// Visits every object in [sweep_begin, sweep_end) that is live but not marked
// (i.e. garbage: live & ~mark). Garbage pointers are batched into a buffer of
// kWordSize * kBitsPerWord slots and delivered to the callback in chunks, so
// the callback receives (count, object array, arg) rather than one call per
// object. Both bitmaps must cover the same heap range.
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
                                        const SpaceBitmap<kAlignment>& mark_bitmap,
                                        uintptr_t sweep_begin, uintptr_t sweep_end,
                                        SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != nullptr);
  CHECK(mark_bitmap.bitmap_begin_ != nullptr);
  // The two bitmaps must be over the same heap for the word-wise diff below.
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  // CHECK_LE above permits equality; an empty range has nothing to sweep.
  if (sweep_end <= sweep_begin) {
    return;
  }

  // TODO: rewrite the callbacks to accept a std::vector<mirror::Object*> rather than a mirror::Object**?
  constexpr size_t buffer_size = kWordSize * kBitsPerWord;
#ifdef __LP64__
  // Heap-allocate for smaller stack frame.
  std::unique_ptr<mirror::Object*[]> pointer_buf_ptr(new mirror::Object*[buffer_size]);
  mirror::Object** pointer_buf = pointer_buf_ptr.get();
#else
  // Stack-allocate buffer as it's small enough.
  mirror::Object* pointer_buf[buffer_size];
#endif
  // pb is the write cursor into pointer_buf.
  mirror::Object** pb = &pointer_buf[0];

  // First and last word indices touched by the sweep range (end is inclusive;
  // the -1 keeps an aligned sweep_end from spilling into the next word).
  size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / kWordSize);
  uword* live = live_bitmap.bitmap_begin_;
  uword* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
    // Bits set here are live-but-unmarked objects: garbage to report.
    uword garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      do {
        // Extract set bits lowest-first; each maps to one object address.
        const size_t shift = CTZ(garbage);
        garbage ^= (static_cast<uword>(1)) << shift;
        *pb++ = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[buffer_size - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  // Flush any remaining batched pointers.
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}
169
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700170template<size_t kAlignment>
171void SpaceBitmap<kAlignment>::WalkInstanceFields(SpaceBitmap<kAlignment>* visited,
172 ObjectCallback* callback, mirror::Object* obj,
173 mirror::Class* klass, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700174 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700175 // Visit fields of parent classes first.
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800176 mirror::Class* super = klass->GetSuperClass();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700177 if (super != NULL) {
178 WalkInstanceFields(visited, callback, obj, super, arg);
179 }
180 // Walk instance fields
Brian Carlstromea46f952013-07-30 01:26:50 -0700181 mirror::ObjectArray<mirror::ArtField>* fields = klass->GetIFields();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700182 if (fields != NULL) {
183 for (int32_t i = 0; i < fields->GetLength(); i++) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700184 mirror::ArtField* field = fields->Get(i);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -0700185 if (!field->IsPrimitiveType()) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800186 mirror::Object* value = field->GetObj(obj);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700187 if (value != NULL) {
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -0700188 WalkFieldsInOrder(visited, callback, value, arg);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700189 }
190 }
191 }
192 }
193}
194
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700195template<size_t kAlignment>
196void SpaceBitmap<kAlignment>::WalkFieldsInOrder(SpaceBitmap<kAlignment>* visited,
Mathieu Chartierbbd695c2014-04-16 09:48:48 -0700197 ObjectCallback* callback, mirror::Object* obj,
198 void* arg) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700199 if (visited->Test(obj)) {
200 return;
201 }
202 // visit the object itself
203 (*callback)(obj, arg);
204 visited->Set(obj);
205 // Walk instance fields of all objects
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800206 mirror::Class* klass = obj->GetClass();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700207 WalkInstanceFields(visited, callback, obj, klass, arg);
208 // Walk static fields of a Class
209 if (obj->IsClass()) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700210 mirror::ObjectArray<mirror::ArtField>* fields = klass->GetSFields();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700211 if (fields != NULL) {
212 for (int32_t i = 0; i < fields->GetLength(); i++) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700213 mirror::ArtField* field = fields->Get(i);
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -0700214 if (!field->IsPrimitiveType()) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800215 mirror::Object* value = field->GetObj(NULL);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700216 if (value != NULL) {
217 WalkFieldsInOrder(visited, callback, value, arg);
218 }
219 }
220 }
221 }
222 } else if (obj->IsObjectArray()) {
223 // Walk elements of an object array
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800224 mirror::ObjectArray<mirror::Object>* obj_array = obj->AsObjectArray<mirror::Object>();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700225 int32_t length = obj_array->GetLength();
226 for (int32_t i = 0; i < length; i++) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800227 mirror::Object* value = obj_array->Get(i);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700228 if (value != NULL) {
229 WalkFieldsInOrder(visited, callback, value, arg);
230 }
231 }
232 }
233}
234
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700235template<size_t kAlignment>
236void SpaceBitmap<kAlignment>::InOrderWalk(ObjectCallback* callback, void* arg) {
Ian Rogers700a4022014-05-19 16:49:03 -0700237 std::unique_ptr<SpaceBitmap<kAlignment>> visited(
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700238 Create("bitmap for in-order walk", reinterpret_cast<byte*>(heap_begin_),
239 IndexToOffset(bitmap_size_ / kWordSize)));
240 CHECK(bitmap_begin_ != nullptr);
241 CHECK(callback != nullptr);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700242 uintptr_t end = Size() / kWordSize;
243 for (uintptr_t i = 0; i < end; ++i) {
Andreas Gampecb8aea42014-04-02 15:39:58 -0700244 // Need uint for unsigned shift.
245 uword w = bitmap_begin_[i];
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700246 if (UNLIKELY(w != 0)) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700247 uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
248 while (w != 0) {
Andreas Gampecb8aea42014-04-02 15:39:58 -0700249 const size_t shift = CTZ(w);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800250 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700251 WalkFieldsInOrder(visited.get(), callback, obj, arg);
Andreas Gampecb8aea42014-04-02 15:39:58 -0700252 w ^= (static_cast<uword>(1)) << shift;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700253 }
254 }
255 }
256}
257
// Explicit instantiations for the two granularities declared in the header:
// object-aligned and page-aligned bitmaps.
template class SpaceBitmap<kObjectAlignment>;
template class SpaceBitmap<kPageSize>;
260
Ian Rogers1d54e732013-05-02 21:10:01 -0700261} // namespace accounting
262} // namespace gc
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700263} // namespace art