blob: 7eed05a339bb175619a937e43ec7655ee1b72730 [file] [log] [blame]
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080017#include "space_bitmap-inl.h"
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070018
19namespace art {
Ian Rogers1d54e732013-05-02 21:10:01 -070020namespace gc {
21namespace accounting {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070022
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070023template<size_t kAlignment>
24SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
25 const std::string& name, MemMap* mem_map, byte* heap_begin, size_t heap_capacity) {
Mathieu Chartier31e89252013-08-28 11:29:12 -070026 CHECK(mem_map != nullptr);
Andreas Gampecb8aea42014-04-02 15:39:58 -070027 uword* bitmap_begin = reinterpret_cast<uword*>(mem_map->Begin());
Mathieu Chartier31e89252013-08-28 11:29:12 -070028 size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
29 return new SpaceBitmap(name, mem_map, bitmap_begin, bitmap_size, heap_begin);
30}
31
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070032template<size_t kAlignment>
33SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
34 const std::string& name, byte* heap_begin, size_t heap_capacity) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070035 CHECK(heap_begin != NULL);
36 // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
37 size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
Ian Rogers8d31bbd2013-10-13 10:44:14 -070038 std::string error_msg;
39 UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), NULL, bitmap_size,
Ian Rogersef7d42f2014-01-06 12:55:46 -080040 PROT_READ | PROT_WRITE, false, &error_msg));
Ian Rogers8d31bbd2013-10-13 10:44:14 -070041 if (UNLIKELY(mem_map.get() == nullptr)) {
42 LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070043 return NULL;
44 }
Mathieu Chartier31e89252013-08-28 11:29:12 -070045 return CreateFromMemMap(name, mem_map.release(), heap_begin, heap_capacity);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070046}
47
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070048template<size_t kAlignment>
49void SpaceBitmap<kAlignment>::SetHeapLimit(uintptr_t new_end) {
Mathieu Chartierdcf8d722012-08-02 14:55:54 -070050 DCHECK(IsAligned<kBitsPerWord * kAlignment>(new_end));
51 size_t new_size = OffsetToIndex(new_end - heap_begin_) * kWordSize;
Mathieu Chartiercc236d72012-07-20 10:29:05 -070052 if (new_size < bitmap_size_) {
53 bitmap_size_ = new_size;
54 }
55 // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
56 // should be marked.
Mathieu Chartiercc236d72012-07-20 10:29:05 -070057}
58
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070059template<size_t kAlignment>
60void SpaceBitmap<kAlignment>::Clear() {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070061 if (bitmap_begin_ != NULL) {
Mathieu Chartier31e89252013-08-28 11:29:12 -070062 // This returns the memory to the system. Successive page faults will return zeroed memory.
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070063 int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
64 if (result == -1) {
Mathieu Chartier7469ebf2012-09-24 16:28:36 -070065 PLOG(FATAL) << "madvise failed";
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070066 }
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070067 }
68}
69
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070070template<size_t kAlignment>
71inline void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
Mathieu Chartier357e9be2012-08-01 11:00:14 -070072 DCHECK_EQ(Size(), source_bitmap->Size());
73 std::copy(source_bitmap->Begin(), source_bitmap->Begin() + source_bitmap->Size() / kWordSize, Begin());
74}
75
// Visits every marked object in [heap_begin_, HeapLimit()), invoking `callback`
// on each, in increasing address order.
template<size_t kAlignment>
inline void SpaceBitmap<kAlignment>::Walk(ObjectCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);

  // Index of the last word that can contain a mark bit for the current limit.
  // NOTE(review): if HeapLimit() == heap_begin_ the subtraction underflows --
  // presumably callers never invoke Walk on an empty range; confirm.
  uintptr_t end = OffsetToIndex(HeapLimit() - heap_begin_ - 1);
  uword* bitmap_begin = bitmap_begin_;
  for (uintptr_t i = 0; i <= end; ++i) {
    uword w = bitmap_begin[i];
    if (w != 0) {
      // Heap address corresponding to bit 0 of word i.
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      do {
        // Extract set bits lowest-first; each bit maps to one kAlignment-sized slot.
        const size_t shift = CTZ(w);
        mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        // Clear the bit just visited so CTZ advances to the next set bit.
        w ^= (static_cast<uword>(1)) << shift;
      } while (w != 0);
    }
  }
}
96
// Finds objects that are live but NOT marked (i.e. garbage) in
// [sweep_begin, sweep_end) and hands them to `callback` in batches, so the
// callback can reclaim many objects per invocation. Both bitmaps must cover
// the same heap range.
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
                                        const SpaceBitmap<kAlignment>& mark_bitmap,
                                        uintptr_t sweep_begin, uintptr_t sweep_end,
                                        SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  // Empty range: nothing to sweep (CHECK_LE above allows sweep_begin == sweep_end).
  if (sweep_end <= sweep_begin) {
    return;
  }

  // Stack buffer of garbage pointers, flushed to the callback in batches.
  // TODO: rewrite the callbacks to accept a std::vector<mirror::Object*> rather than a mirror::Object**?
  const size_t buffer_size = kWordSize * kBitsPerWord;
  mirror::Object* pointer_buf[buffer_size];
  mirror::Object** pb = &pointer_buf[0];
  size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  // -1 so an exactly word-aligned sweep_end does not index one word past the last.
  size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / kWordSize);
  uword* live = live_bitmap.bitmap_begin_;
  uword* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
    // Bits set in live but clear in mark identify unreachable (garbage) objects.
    uword garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      do {
        const size_t shift = CTZ(garbage);
        garbage ^= (static_cast<uword>(1)) << shift;
        *pb++ = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[buffer_size - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  // Flush any remaining pointers that did not fill a full batch.
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}
144
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700145template<size_t kAlignment>
146void SpaceBitmap<kAlignment>::WalkInstanceFields(SpaceBitmap<kAlignment>* visited,
147 ObjectCallback* callback, mirror::Object* obj,
148 mirror::Class* klass, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700149 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700150 // Visit fields of parent classes first.
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800151 mirror::Class* super = klass->GetSuperClass();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700152 if (super != NULL) {
153 WalkInstanceFields(visited, callback, obj, super, arg);
154 }
155 // Walk instance fields
Brian Carlstromea46f952013-07-30 01:26:50 -0700156 mirror::ObjectArray<mirror::ArtField>* fields = klass->GetIFields();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700157 if (fields != NULL) {
158 for (int32_t i = 0; i < fields->GetLength(); i++) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700159 mirror::ArtField* field = fields->Get(i);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700160 FieldHelper fh(field);
Mathieu Chartier66f19252012-09-18 08:57:04 -0700161 if (!fh.IsPrimitiveType()) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800162 mirror::Object* value = field->GetObj(obj);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700163 if (value != NULL) {
164 WalkFieldsInOrder(visited, callback, value, arg);
165 }
166 }
167 }
168 }
169}
170
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700171template<size_t kAlignment>
172void SpaceBitmap<kAlignment>::WalkFieldsInOrder(SpaceBitmap<kAlignment>* visited,
173 ObjectCallback* callback,
174 mirror::Object* obj, void* arg) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700175 if (visited->Test(obj)) {
176 return;
177 }
178 // visit the object itself
179 (*callback)(obj, arg);
180 visited->Set(obj);
181 // Walk instance fields of all objects
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800182 mirror::Class* klass = obj->GetClass();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700183 WalkInstanceFields(visited, callback, obj, klass, arg);
184 // Walk static fields of a Class
185 if (obj->IsClass()) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700186 mirror::ObjectArray<mirror::ArtField>* fields = klass->GetSFields();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700187 if (fields != NULL) {
188 for (int32_t i = 0; i < fields->GetLength(); i++) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700189 mirror::ArtField* field = fields->Get(i);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700190 FieldHelper fh(field);
Mathieu Chartier66f19252012-09-18 08:57:04 -0700191 if (!fh.IsPrimitiveType()) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800192 mirror::Object* value = field->GetObj(NULL);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700193 if (value != NULL) {
194 WalkFieldsInOrder(visited, callback, value, arg);
195 }
196 }
197 }
198 }
199 } else if (obj->IsObjectArray()) {
200 // Walk elements of an object array
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800201 mirror::ObjectArray<mirror::Object>* obj_array = obj->AsObjectArray<mirror::Object>();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700202 int32_t length = obj_array->GetLength();
203 for (int32_t i = 0; i < length; i++) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800204 mirror::Object* value = obj_array->Get(i);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700205 if (value != NULL) {
206 WalkFieldsInOrder(visited, callback, value, arg);
207 }
208 }
209 }
210}
211
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700212template<size_t kAlignment>
213void SpaceBitmap<kAlignment>::InOrderWalk(ObjectCallback* callback, void* arg) {
214 UniquePtr<SpaceBitmap<kAlignment>> visited(
215 Create("bitmap for in-order walk", reinterpret_cast<byte*>(heap_begin_),
216 IndexToOffset(bitmap_size_ / kWordSize)));
217 CHECK(bitmap_begin_ != nullptr);
218 CHECK(callback != nullptr);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700219 uintptr_t end = Size() / kWordSize;
220 for (uintptr_t i = 0; i < end; ++i) {
Andreas Gampecb8aea42014-04-02 15:39:58 -0700221 // Need uint for unsigned shift.
222 uword w = bitmap_begin_[i];
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700223 if (UNLIKELY(w != 0)) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700224 uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
225 while (w != 0) {
Andreas Gampecb8aea42014-04-02 15:39:58 -0700226 const size_t shift = CTZ(w);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800227 mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700228 WalkFieldsInOrder(visited.get(), callback, obj, arg);
Andreas Gampecb8aea42014-04-02 15:39:58 -0700229 w ^= (static_cast<uword>(1)) << shift;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700230 }
231 }
232 }
233}
234
// Invokes `callback` on every object in the set. The const_cast is needed
// because the set stores const pointers while the C-style callback takes a
// mutable mirror::Object*.
void ObjectSet::Walk(ObjectCallback* callback, void* arg) {
  for (const mirror::Object* obj : contained_) {
    callback(const_cast<mirror::Object*>(obj), arg);
  }
}
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700240
// Explicit instantiations of the two bitmap granularities the runtime uses.
// NOTE(review): presumably kObjectAlignment serves regular object spaces and
// kPageSize serves page-granular (large-object) tracking -- confirm against the header.
template class SpaceBitmap<kObjectAlignment>;
template class SpaceBitmap<kPageSize>;
243
Ian Rogers1d54e732013-05-02 21:10:01 -0700244} // namespace accounting
245} // namespace gc
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700246} // namespace art