/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "heap_bitmap.h"

#include "logging.h"
#include "UniquePtr.h"
#include "utils.h"

namespace art {

HeapBitmap* HeapBitmap::Create(const char* name, byte* heap_begin, size_t heap_capacity) {
  CHECK(heap_begin != NULL);
  // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
  size_t bitmap_size = HB_OFFSET_TO_INDEX(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
  UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name, NULL, bitmap_size, PROT_READ | PROT_WRITE));
  if (mem_map.get() == NULL) {
    LOG(ERROR) << "Failed to allocate bitmap " << name;
    return NULL;
  }
  word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
  return new HeapBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
}
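
// Sizing sketch (illustrative, not mandated by this file): each bit covers kAlignment bytes of
// heap, so one bitmap word covers kAlignment * kBitsPerWord bytes. Assuming kAlignment == 8 and
// 32-bit words, that is 256 heap bytes per bitmap word, and a 64 MiB heap needs a 1 MiB bitmap
// (1/64 of the heap). A hypothetical caller (names are for illustration only) might look like:
//
//   HeapBitmap* live_bitmap = HeapBitmap::Create("live bitmap", heap_base, 64 * 1024 * 1024);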

// Clean up any resources associated with the bitmap.
HeapBitmap::~HeapBitmap() {}

// Fill the bitmap with zeroes. Returns the bitmap's memory to the
// system as a side-effect.
void HeapBitmap::Clear() {
  if (bitmap_begin_ != NULL) {
    // This returns the memory to the system. Successive page faults
    // will return zeroed memory.
    int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
    if (result == -1) {
      PLOG(WARNING) << "madvise failed";
    }
    heap_end_ = heap_begin_ - 1;
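    // heap_end_ < heap_begin_ is the "empty bitmap" state that Walk() and SweepWalk() test for
    // before traversing.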
  }
}

// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
// even if a bit has not been set for it.
bool HeapBitmap::HasAddress(const void* obj) const {
  if (obj != NULL) {
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = HB_OFFSET_TO_INDEX(offset);
    return index < bitmap_size_ / kWordSize;
  }
  return false;
}
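
// Worked example (illustrative, assuming kAlignment == 8 and 32-bit words): for an object at
// heap_begin_ + 0x1230, offset == 0x1230, so HB_OFFSET_TO_INDEX yields word 0x1230 / 256 == 18,
// and the object corresponds to bit (0x1230 / 8) % 32 == 6 of that word, counted from the
// most-significant bit (the numbering the CLZ-based decode below relies on).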

void HeapBitmap::VisitRange(uintptr_t visit_begin, uintptr_t visit_end, Callback* visitor, void* arg) const {
  size_t start = HB_OFFSET_TO_INDEX(visit_begin - heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(visit_end - heap_begin_ - 1);
  for (size_t i = start; i <= end; i++) {
    word w = bitmap_begin_[i];
    if (w != 0) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*visitor)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}
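
// Decode sketch for the inner loop above (illustrative, assuming 32-bit words): if a bitmap word
// w == 0x00400001, then CLZ(w) == 9, so the first object visited is at ptr_base + 9 * kAlignment;
// clearing high_bit >> 9 leaves w == 0x00000001, and CLZ then yields 31, i.e. the last object in
// the word at ptr_base + 31 * kAlignment. Each iteration peels off the highest remaining set bit.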

// Visits set bits in address order. The callback is not permitted to
// change the bitmap bits or max during the traversal.
void HeapBitmap::Walk(HeapBitmap::Callback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  if (heap_end_ < heap_begin_) {
    return;  // Bitmap is empty.
  }
  uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

// Similar to Walk but the callback routine is permitted to change the bitmap bits and end during
// traversal. Used exclusively by the root marking scan.
//
// The callback is invoked with a finger argument. The finger is a pointer to an address not yet
// visited by the traversal. If the callback sets a bit for an address at or above the finger, this
// address will be visited by the traversal. If the callback sets a bit for an address below the
// finger, this address will not be visited (typically such an address would be placed on the
// marking stack).
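//
// Illustrative sketch (hypothetical, not code from this file): a ScanCallback might mark each
// reference found in obj and push only those below the finger onto a mark stack, since addresses
// at or above the finger will still be reached by this traversal:
//
//   static void ExampleScanCallback(Object* obj, void* finger, void* arg) {
//     MarkStack* stack = reinterpret_cast<MarkStack*>(arg);  // MarkStack is a hypothetical type.
//     // ... for each reference 'ref' in obj: set its bit, and ...
//     if (reinterpret_cast<void*>(ref) < finger) {
//       stack->Push(ref);  // Deferred: the traversal has already passed this address.
//     }
//   }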
void HeapBitmap::ScanWalk(uintptr_t scan_begin, uintptr_t scan_end, ScanCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  CHECK_LE(scan_begin, scan_end);
  CHECK_GE(scan_begin, heap_begin_);
  size_t start = HB_OFFSET_TO_INDEX(scan_begin - heap_begin_);
  if (scan_end < heap_end_) {
    // The end of the range we're scanning is below the bitmap's current maximum (heap_end_), so
    // scan up to scan_end and don't recompute the end on each iteration.
    size_t end = HB_OFFSET_TO_INDEX(scan_end - heap_begin_ - 1);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = 1 << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
    }
  } else {
    size_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    for (size_t i = start; i <= end; i++) {
      word w = bitmap_begin_[i];
      if (UNLIKELY(w != 0)) {
        word high_bit = 1 << (kBitsPerWord - 1);
        uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
        void* finger = reinterpret_cast<void*>(HB_INDEX_TO_OFFSET(i + 1) + heap_begin_);
        while (w != 0) {
          const int shift = CLZ(w);
          Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
          (*callback)(obj, finger, arg);
          w &= ~(high_bit >> shift);
        }
      }
      // Update 'end' in case the callback modified the bitmap.
      end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
    }
  }
}

// Walk through the bitmaps in increasing address order, and find the
// object pointers that correspond to garbage objects. Call
// <callback> zero or more times with lists of these object pointers.
//
// The callback is not permitted to increase the max of either bitmap.
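//
// For example (illustrative 32-bit values): if live[i] == 0xF0000000 and mark[i] == 0x90000000,
// then garbage == live[i] & ~mark[i] == 0x60000000, and the loop below hands the two dead objects
// at ptr_base + 1 * kAlignment and ptr_base + 2 * kAlignment to the callback.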
void HeapBitmap::SweepWalk(const HeapBitmap& live_bitmap,
                           const HeapBitmap& mark_bitmap,
                           uintptr_t sweep_begin, uintptr_t sweep_end,
                           HeapBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);
  sweep_end = std::min(sweep_end - 1, live_bitmap.heap_end_);
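  // The line above converts sweep_end (an exclusive bound, like visit_end and scan_end above) into
  // an inclusive last address, clamped to the live bitmap's current maximum (heap_end_).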
  if (live_bitmap.heap_end_ < live_bitmap.heap_begin_) {
    // Easy case; both are obviously empty.
    // TODO: this should never happen
    return;
  }
  // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than an Object**?
  std::vector<Object*> pointer_buf(4 * kBitsPerWord);
  Object** pb = &pointer_buf[0];
  size_t start = HB_OFFSET_TO_INDEX(sweep_begin - live_bitmap.heap_begin_);
  size_t end = HB_OFFSET_TO_INDEX(sweep_end - live_bitmap.heap_begin_);
  word* live = live_bitmap.bitmap_begin_;
  word* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
    word garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + live_bitmap.heap_begin_;
      while (garbage != 0) {
        int shift = CLZ(garbage);
        garbage &= ~(high_bit >> shift);
        *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
      }
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[pointer_buf.size() - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

}  // namespace art

// Support needed for in-order traversal.
#include "object.h"
#include "object_utils.h"

namespace art {

static void WalkFieldsInOrder(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
                              void* arg);

// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
static void WalkInstanceFields(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
                               Class* klass, void* arg) {
  // Visit fields of parent classes first.
  Class* super = klass->GetSuperClass();
  if (super != NULL) {
    WalkInstanceFields(visited, callback, obj, super, arg);
  }
  // Walk instance fields.
  ObjectArray<Field>* fields = klass->GetIFields();
  if (fields != NULL) {
    for (int32_t i = 0; i < fields->GetLength(); i++) {
      Field* field = fields->Get(i);
      FieldHelper fh(field);
      if (!fh.GetType()->IsPrimitive()) {
        Object* value = field->GetObj(obj);
        if (value != NULL) {
          WalkFieldsInOrder(visited, callback, value, arg);
        }
      }
    }
  }
}

// For an unvisited object, visit it then all its children found via fields.
static void WalkFieldsInOrder(HeapBitmap* visited, HeapBitmap::Callback* callback, Object* obj,
                              void* arg) {
  if (visited->Test(obj)) {
    return;
  }
  // Visit the object itself.
  (*callback)(obj, arg);
  visited->Set(obj);
  // Walk instance fields of all objects.
  Class* klass = obj->GetClass();
  WalkInstanceFields(visited, callback, obj, klass, arg);
  // Walk static fields of a Class.
  if (obj->IsClass()) {
    ObjectArray<Field>* fields = klass->GetSFields();
    if (fields != NULL) {
      for (int32_t i = 0; i < fields->GetLength(); i++) {
        Field* field = fields->Get(i);
        FieldHelper fh(field);
        if (!fh.GetType()->IsPrimitive()) {
          Object* value = field->GetObj(NULL);
          if (value != NULL) {
            WalkFieldsInOrder(visited, callback, value, arg);
          }
        }
      }
    }
  } else if (obj->IsObjectArray()) {
    // Walk elements of an object array.
    ObjectArray<Object>* obj_array = obj->AsObjectArray<Object>();
    int32_t length = obj_array->GetLength();
    for (int32_t i = 0; i < length; i++) {
      Object* value = obj_array->Get(i);
      if (value != NULL) {
        WalkFieldsInOrder(visited, callback, value, arg);
      }
    }
  }
}
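
// Note: the 'visited' bitmap is what keeps this recursion from looping forever on object graphs
// with cycles (e.g. two objects whose fields reference each other): the second time either object
// is reached, Test() returns true and the walk returns immediately.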

// Visits set bits with an in-order traversal. The callback is not permitted to change the bitmap
// bits or max during the traversal.
void HeapBitmap::InOrderWalk(HeapBitmap::Callback* callback, void* arg) {
  UniquePtr<HeapBitmap> visited(Create("bitmap for in-order walk",
                                       reinterpret_cast<byte*>(heap_begin_),
                                       HB_INDEX_TO_OFFSET(bitmap_size_ / kWordSize)));
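  // The visited bitmap covers the same range as this bitmap: HB_INDEX_TO_OFFSET(bitmap_size_ /
  // kWordSize) recovers the (rounded-up) heap capacity that was passed to Create().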
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  uintptr_t end = HB_OFFSET_TO_INDEX(heap_end_ - heap_begin_);
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      word high_bit = 1 << (kBitsPerWord - 1);
      uintptr_t ptr_base = HB_INDEX_TO_OFFSET(i) + heap_begin_;
      while (w != 0) {
        const int shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        WalkFieldsInOrder(visited.get(), callback, obj, arg);
        w &= ~(high_bit >> shift);
      }
    }
  }
}

}  // namespace art