blob: 7a4c48dacfed3a58925dd0649f39e673630f75ee [file] [log] [blame]
Mathieu Chartierb062fdd2012-07-03 09:51:48 -07001/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "heap_bitmap.h"
18
19#include "logging.h"
20#include "UniquePtr.h"
21#include "utils.h"
22
23namespace art {
24
// Returns the debug name assigned to this bitmap (set at creation or via SetName).
std::string SpaceBitmap::GetName() const {
  return name_;
}
28
// Replaces the bitmap's debug name; used only for logging/diagnostics.
void SpaceBitmap::SetName(const std::string& name) {
  name_ = name;
}
32
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070033SpaceBitmap* SpaceBitmap::Create(const std::string& name, byte* heap_begin, size_t heap_capacity) {
34 CHECK(heap_begin != NULL);
35 // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerWord.
36 size_t bitmap_size = OffsetToIndex(RoundUp(heap_capacity, kAlignment * kBitsPerWord)) * kWordSize;
37 UniquePtr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), NULL, bitmap_size, PROT_READ | PROT_WRITE));
38 if (mem_map.get() == NULL) {
39 LOG(ERROR) << "Failed to allocate bitmap " << name;
40 return NULL;
41 }
42 word* bitmap_begin = reinterpret_cast<word*>(mem_map->Begin());
43 return new SpaceBitmap(name, mem_map.release(), bitmap_begin, bitmap_size, heap_begin);
44}
45
// Clean up any resources associated with the bitmap. Nothing to do explicitly
// here; presumably the MemMap member releases the backing pages — confirm
// against the class definition.
SpaceBitmap::~SpaceBitmap() {}
48
// Shrinks the bitmap's view of the heap so it ends at new_end. Only ever
// reduces bitmap_size_ — a new_end past the current limit leaves the size
// unchanged (the backing allocation is never grown here).
void SpaceBitmap::SetHeapLimit(uintptr_t new_end) {
  DCHECK(IsAligned<kBitsPerWord * kAlignment>(new_end));
  size_t new_size = OffsetToIndex(new_end - heap_begin_) * kWordSize;
  if (new_size < bitmap_size_) {
    bitmap_size_ = new_size;
  }
  // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
  // should be marked.
  // TODO: Fix this code: it is broken and causes rare heap corruption!
  // mem_map_->Trim(reinterpret_cast<byte*>(heap_begin_ + bitmap_size_));
}
60
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070061// Fill the bitmap with zeroes. Returns the bitmap's memory to the
62// system as a side-effect.
63void SpaceBitmap::Clear() {
64 if (bitmap_begin_ != NULL) {
65 // This returns the memory to the system. Successive page faults
66 // will return zeroed memory.
67 int result = madvise(bitmap_begin_, bitmap_size_, MADV_DONTNEED);
68 if (result == -1) {
69 PLOG(WARNING) << "madvise failed";
70 }
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070071 }
72}
73
Mathieu Chartier357e9be2012-08-01 11:00:14 -070074void SpaceBitmap::CopyFrom(SpaceBitmap* source_bitmap) {
75 DCHECK_EQ(Size(), source_bitmap->Size());
76 std::copy(source_bitmap->Begin(), source_bitmap->Begin() + source_bitmap->Size() / kWordSize, Begin());
77}
78
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070079// Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
80// even if a bit has not been set for it.
81bool SpaceBitmap::HasAddress(const void* obj) const {
82 // If obj < heap_begin_ then offset underflows to some very large value past the end of the bitmap.
83 const uintptr_t offset = (uintptr_t)obj - heap_begin_;
84 const size_t index = OffsetToIndex(offset);
85 return index < bitmap_size_ / kWordSize;
86}
87
// Visits set bits in address order. The callback is not permitted to
// change the bitmap bits or max during the traversal.
void SpaceBitmap::Walk(SpaceBitmap::Callback* callback, void* arg) {
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);

  // Index of the last bitmap word that can hold a bit for the current heap limit.
  uintptr_t end = OffsetToIndex(HeapLimit() - heap_begin_ - 1);
  word* bitmap_begin = bitmap_begin_;
  for (uintptr_t i = 0; i <= end; ++i) {
    word w = bitmap_begin[i];
    if (w != 0) {
      // Heap address corresponding to bit 0 (the high-order bit) of word i.
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      // Consume set bits from most-significant to least-significant: CLZ finds
      // the highest remaining set bit (which maps to the lowest remaining
      // address in this word's range), and the XOR clears it for the next pass.
      do {
        const size_t shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w ^= static_cast<size_t>(kWordHighBitMask) >> shift;
      } while (w != 0);
    }
  }
}
109
// Walk through the bitmaps in increasing address order, and find the
// object pointers that correspond to garbage objects. Call
// <callback> zero or more times with lists of these object pointers.
//
// The callback is not permitted to increase the max of either bitmap.
void SpaceBitmap::SweepWalk(const SpaceBitmap& live_bitmap,
                            const SpaceBitmap& mark_bitmap,
                            uintptr_t sweep_begin, uintptr_t sweep_end,
                            SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != NULL);
  CHECK(mark_bitmap.bitmap_begin_ != NULL);
  // Both bitmaps must cover the same heap range so the words can be compared
  // index-for-index below.
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != NULL);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  // Empty range: nothing to sweep. (CHECK_LE above already rules out
  // sweep_end < sweep_begin; this also returns early on an equal, empty range.)
  if (sweep_end <= sweep_begin) {
    return;
  }

  // TODO: rewrite the callbacks to accept a std::vector<Object*> rather than a Object**?
  // Stack buffer that batches garbage pointers before invoking the callback.
  const size_t buffer_size = kWordSize * kBitsPerWord;
  Object* pointer_buf[buffer_size];
  Object** pb = &pointer_buf[0];
  // First and last bitmap word indices touched by [sweep_begin, sweep_end).
  size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / kWordSize);
  word* live = live_bitmap.bitmap_begin_;
  word* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
    // A bit that is live but not marked identifies a garbage object.
    word garbage = live[i] & ~mark[i];
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      // Extract set bits high-to-low (see Walk for the CLZ idiom) and collect
      // the corresponding object pointers into the buffer.
      do {
        const size_t shift = CLZ(garbage);
        garbage ^= static_cast<size_t>(kWordHighBitMask) >> shift;
        *pb++ = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[buffer_size - kBitsPerWord]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  // Flush any pointers still buffered after the last word.
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}
161
162} // namespace art
163
164// Support needed for in order traversal
165#include "object.h"
166#include "object_utils.h"
167
168namespace art {
169
170static void WalkFieldsInOrder(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
171 void* arg);
172
173// Walk instance fields of the given Class. Separate function to allow recursion on the super
174// class.
175static void WalkInstanceFields(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700176 Class* klass, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700177 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700178 // Visit fields of parent classes first.
179 Class* super = klass->GetSuperClass();
180 if (super != NULL) {
181 WalkInstanceFields(visited, callback, obj, super, arg);
182 }
183 // Walk instance fields
184 ObjectArray<Field>* fields = klass->GetIFields();
185 if (fields != NULL) {
186 for (int32_t i = 0; i < fields->GetLength(); i++) {
187 Field* field = fields->Get(i);
188 FieldHelper fh(field);
189 if (!fh.GetType()->IsPrimitive()) {
190 Object* value = field->GetObj(obj);
191 if (value != NULL) {
192 WalkFieldsInOrder(visited, callback, value, arg);
193 }
194 }
195 }
196 }
197}
198
199// For an unvisited object, visit it then all its children found via fields.
200static void WalkFieldsInOrder(SpaceBitmap* visited, SpaceBitmap::Callback* callback, Object* obj,
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700201 void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700202 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700203 if (visited->Test(obj)) {
204 return;
205 }
206 // visit the object itself
207 (*callback)(obj, arg);
208 visited->Set(obj);
209 // Walk instance fields of all objects
210 Class* klass = obj->GetClass();
211 WalkInstanceFields(visited, callback, obj, klass, arg);
212 // Walk static fields of a Class
213 if (obj->IsClass()) {
214 ObjectArray<Field>* fields = klass->GetSFields();
215 if (fields != NULL) {
216 for (int32_t i = 0; i < fields->GetLength(); i++) {
217 Field* field = fields->Get(i);
218 FieldHelper fh(field);
219 if (!fh.GetType()->IsPrimitive()) {
220 Object* value = field->GetObj(NULL);
221 if (value != NULL) {
222 WalkFieldsInOrder(visited, callback, value, arg);
223 }
224 }
225 }
226 }
227 } else if (obj->IsObjectArray()) {
228 // Walk elements of an object array
229 ObjectArray<Object>* obj_array = obj->AsObjectArray<Object>();
230 int32_t length = obj_array->GetLength();
231 for (int32_t i = 0; i < length; i++) {
232 Object* value = obj_array->Get(i);
233 if (value != NULL) {
234 WalkFieldsInOrder(visited, callback, value, arg);
235 }
236 }
237 }
238}
239
// Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
// bits or max during the traversal.
void SpaceBitmap::InOrderWalk(SpaceBitmap::Callback* callback, void* arg) {
  // Scratch bitmap covering the same heap range, used to record objects
  // already reached so the field-driven recursion visits each object once.
  UniquePtr<SpaceBitmap> visited(Create("bitmap for in-order walk",
                                        reinterpret_cast<byte*>(heap_begin_),
                                        IndexToOffset(bitmap_size_ / kWordSize)));
  CHECK(bitmap_begin_ != NULL);
  CHECK(callback != NULL);
  // Total number of words in this bitmap.
  uintptr_t end = Size() / kWordSize;
  for (uintptr_t i = 0; i < end; ++i) {
    word w = bitmap_begin_[i];
    if (UNLIKELY(w != 0)) {
      // Heap address corresponding to the high-order bit of word i.
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      // Consume set bits high-to-low (same CLZ idiom as Walk), recursing into
      // each object's fields via WalkFieldsInOrder.
      while (w != 0) {
        const size_t shift = CLZ(w);
        Object* obj = reinterpret_cast<Object*>(ptr_base + shift * kAlignment);
        WalkFieldsInOrder(visited.get(), callback, obj, arg);
        w ^= static_cast<size_t>(kWordHighBitMask) >> shift;
      }
    }
  }
}
262
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700263std::ostream& operator << (std::ostream& stream, const SpaceBitmap& bitmap) {
264 return stream
265 << bitmap.GetName() << "["
266 << "begin=" << reinterpret_cast<const void*>(bitmap.HeapBegin())
267 << ",end=" << reinterpret_cast<const void*>(bitmap.HeapLimit())
268 << "]";
269 }
270
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700271} // namespace art