/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
18#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080019
Ian Rogers719d1a32014-03-06 12:13:39 -080020#include "base/mutex.h"
Mathieu Chartier0a9dc052013-07-25 11:01:28 -070021#include "gc_allocator.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080022#include "globals.h"
23#include "mem_map.h"
Mathieu Chartier83c8ee02014-01-28 14:50:23 -080024#include "object_callbacks.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080025#include "UniquePtr.h"
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070026
27#include <limits.h>
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -070028#include <set>
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070029#include <stdint.h>
30#include <vector>
31
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070032namespace art {
Ian Rogers1d54e732013-05-02 21:10:01 -070033
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034namespace mirror {
Ian Rogers1d54e732013-05-02 21:10:01 -070035 class Object;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080036} // namespace mirror
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070037
Ian Rogers1d54e732013-05-02 21:10:01 -070038namespace gc {
39namespace accounting {
40
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070041template<size_t kAlignment>
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070042class SpaceBitmap {
43 public:
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080044 typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070045
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080046 typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070047
Mathieu Chartier31e89252013-08-28 11:29:12 -070048 // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070049 // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
50 static SpaceBitmap* Create(const std::string& name, byte* heap_begin, size_t heap_capacity);
51
Mathieu Chartier31e89252013-08-28 11:29:12 -070052 // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
53 // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
54 // Objects are kAlignement-aligned.
55 static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
56 byte* heap_begin, size_t heap_capacity);
57
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070058 ~SpaceBitmap() {
59 }
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070060
61 // <offset> is the difference from .base to a pointer address.
62 // <index> is the index of .bits that contains the bit representing
63 // <offset>.
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070064 static size_t OffsetToIndex(size_t offset) ALWAYS_INLINE {
Ian Rogers1d54e732013-05-02 21:10:01 -070065 return offset / kAlignment / kBitsPerWord;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070066 }
67
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070068 static uintptr_t IndexToOffset(size_t index) ALWAYS_INLINE {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070069 return static_cast<uintptr_t>(index * kAlignment * kBitsPerWord);
70 }
71
Andreas Gampecb8aea42014-04-02 15:39:58 -070072 // Bits are packed in the obvious way.
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070073 static uword OffsetToMask(uintptr_t offset) ALWAYS_INLINE {
Andreas Gampecb8aea42014-04-02 15:39:58 -070074 return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerWord);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070075 }
76
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070077 bool Set(const mirror::Object* obj) ALWAYS_INLINE {
78 return Modify<true>(obj);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070079 }
80
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -070081 bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
82 return Modify<false>(obj);
Mathieu Chartier02b6a782012-10-26 13:51:26 -070083 }
84
85 // Returns true if the object was previously marked.
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080086 bool AtomicTestAndSet(const mirror::Object* obj);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070087
Ian Rogers1d54e732013-05-02 21:10:01 -070088 // Fill the bitmap with zeroes. Returns the bitmap's memory to the system as a side-effect.
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070089 void Clear();
90
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080091 bool Test(const mirror::Object* obj) const;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070092
Ian Rogers506de0c2012-09-17 15:39:06 -070093 // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
94 // even if a bit has not been set for it.
95 bool HasAddress(const void* obj) const {
96 // If obj < heap_begin_ then offset underflows to some very large value past the end of the
97 // bitmap.
buzbeecbd6d442012-11-17 14:11:25 -080098 const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
Ian Rogers506de0c2012-09-17 15:39:06 -070099 const size_t index = OffsetToIndex(offset);
100 return index < bitmap_size_ / kWordSize;
101 }
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700102
Mathieu Chartier83c8ee02014-01-28 14:50:23 -0800103 void VisitRange(uintptr_t base, uintptr_t max, ObjectCallback* callback, void* arg) const;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700104
105 class ClearVisitor {
106 public:
107 explicit ClearVisitor(SpaceBitmap* const bitmap)
108 : bitmap_(bitmap) {
109 }
110
Brian Carlstromdf629502013-07-17 22:39:56 -0700111 void operator()(mirror::Object* obj) const {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700112 bitmap_->Clear(obj);
113 }
114 private:
115 SpaceBitmap* const bitmap_;
116 };
117
118 template <typename Visitor>
119 void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
Brian Carlstromdf629502013-07-17 22:39:56 -0700120 for (; visit_begin < visit_end; visit_begin += kAlignment) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800121 visitor(reinterpret_cast<mirror::Object*>(visit_begin));
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700122 }
123 }
124
Andreas Gampebe73e572014-04-03 10:46:42 -0700125 /**
126 * Visit the live objects in the range [visit_begin, visit_end).
127 */
Mathieu Chartier184e3222013-08-03 14:02:57 -0700128 template <typename Visitor>
129 void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800130 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
131 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700132
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700133 // Visits set bits in address order. The callback is not permitted to change the bitmap bits or
134 // max during the traversal.
Mathieu Chartier83c8ee02014-01-28 14:50:23 -0800135 void Walk(ObjectCallback* callback, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700136 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700137
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700138 // Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
139 // bits or max during the traversal.
Mathieu Chartier83c8ee02014-01-28 14:50:23 -0800140 void InOrderWalk(ObjectCallback* callback, void* arg)
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800141 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700142
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700143 // Walk through the bitmaps in increasing address order, and find the object pointers that
144 // correspond to garbage objects. Call <callback> zero or more times with lists of these object
145 // pointers. The callback is not permitted to increase the max of either bitmap.
Mathieu Chartier184e3222013-08-03 14:02:57 -0700146 static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
147 uintptr_t max, SweepCallback* thunk, void* arg);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700148
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700149 void CopyFrom(SpaceBitmap* source_bitmap);
150
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700151 // Starting address of our internal storage.
Andreas Gampecb8aea42014-04-02 15:39:58 -0700152 uword* Begin() {
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700153 return bitmap_begin_;
154 }
155
156 // Size of our internal storage
157 size_t Size() const {
158 return bitmap_size_;
159 }
160
161 // Size in bytes of the memory that the bitmaps spans.
162 size_t HeapSize() const {
163 return IndexToOffset(Size() / kWordSize);
164 }
165
Mathieu Chartierdcf8d722012-08-02 14:55:54 -0700166 uintptr_t HeapBegin() const {
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700167 return heap_begin_;
168 }
169
Mathieu Chartierdcf8d722012-08-02 14:55:54 -0700170 // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
171 uintptr_t HeapLimit() const {
172 return HeapBegin() + static_cast<uintptr_t>(HeapSize());
173 }
174
175 // Set the max address which can covered by the bitmap.
176 void SetHeapLimit(uintptr_t new_end);
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700177
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700178 std::string GetName() const {
179 return name_;
180 }
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700181
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700182 void SetName(const std::string& name) {
183 name_ = name;
184 }
185
186 std::string Dump() const {
187 return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
188 reinterpret_cast<void*>(HeapLimit()));
189 }
Ian Rogers1d54e732013-05-02 21:10:01 -0700190
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800191 const void* GetObjectWordAddress(const mirror::Object* obj) const {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700192 uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
193 const uintptr_t offset = addr - heap_begin_;
194 const size_t index = OffsetToIndex(offset);
195 return &bitmap_begin_[index];
196 }
Brian Carlstrom0cd7ec22013-07-17 23:40:20 -0700197
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700198 private:
199 // TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
200 // however, we document that this is expected on heap_end_
Andreas Gampecb8aea42014-04-02 15:39:58 -0700201 SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin, size_t bitmap_size,
Mathieu Chartier184e3222013-08-03 14:02:57 -0700202 const void* heap_begin)
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700203 : mem_map_(mem_map), bitmap_begin_(bitmap_begin), bitmap_size_(bitmap_size),
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700204 heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700205 name_(name) {}
206
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700207 template<bool kSetBit>
208 bool Modify(const mirror::Object* obj);
209
210 // For an unvisited object, visit it then all its children found via fields.
211 static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj,
212 void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
213 // Walk instance fields of the given Class. Separate function to allow recursion on the super
214 // class.
215 static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
216 mirror::Object* obj, mirror::Class* klass, void* arg)
217 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700218
219 // Backing storage for bitmap.
220 UniquePtr<MemMap> mem_map_;
221
222 // This bitmap itself, word sized for efficiency in scanning.
Andreas Gampecb8aea42014-04-02 15:39:58 -0700223 uword* const bitmap_begin_;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700224
225 // Size of this bitmap.
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700226 size_t bitmap_size_;
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700227
228 // The base address of the heap, which corresponds to the word containing the first bit in the
229 // bitmap.
230 const uintptr_t heap_begin_;
231
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700232 // Name of this bitmap.
233 std::string name_;
234};
235
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700236// Like a bitmap except it keeps track of objects using sets.
Mathieu Chartierdb7f37d2014-01-10 11:09:06 -0800237class ObjectSet {
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700238 public:
Mathieu Chartier0a9dc052013-07-25 11:01:28 -0700239 typedef std::set<
240 const mirror::Object*, std::less<const mirror::Object*>,
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700241 GcAllocator<const mirror::Object*> > Objects;
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700242
243 bool IsEmpty() const {
244 return contained_.empty();
245 }
246
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800247 inline void Set(const mirror::Object* obj) {
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700248 contained_.insert(obj);
249 }
250
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800251 inline void Clear(const mirror::Object* obj) {
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700252 Objects::iterator found = contained_.find(obj);
253 if (found != contained_.end()) {
254 contained_.erase(found);
255 }
256 }
257
258 void Clear() {
259 contained_.clear();
260 }
261
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800262 inline bool Test(const mirror::Object* obj) const {
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700263 return contained_.find(obj) != contained_.end();
264 }
265
Mathieu Chartierdb7f37d2014-01-10 11:09:06 -0800266 const std::string& GetName() const {
267 return name_;
268 }
269
270 void SetName(const std::string& name) {
271 name_ = name;
272 }
273
274 void CopyFrom(const ObjectSet& space_set) {
275 contained_ = space_set.contained_;
276 }
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700277
Ian Rogers719d1a32014-03-06 12:13:39 -0800278 void Walk(ObjectCallback* callback, void* arg) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700279
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700280 template <typename Visitor>
281 void Visit(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS {
Mathieu Chartier11409ae2013-09-23 11:49:36 -0700282 for (const mirror::Object* obj : contained_) {
283 visitor(const_cast<mirror::Object*>(obj));
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700284 }
285 }
286
Mathieu Chartierdb7f37d2014-01-10 11:09:06 -0800287 explicit ObjectSet(const std::string& name) : name_(name) {}
288 ~ObjectSet() {}
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700289
290 Objects& GetObjects() {
291 return contained_;
292 }
293
294 private:
295 std::string name_;
296 Objects contained_;
297};
298
Mathieu Chartiera8e8f9c2014-04-09 14:51:05 -0700299typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
300// TODO: Replace usage of ObjectSet with LargeObjectBitmap.
301typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;
302
303template<size_t kAlignment>
304std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700305
Ian Rogers1d54e732013-05-02 21:10:01 -0700306} // namespace accounting
307} // namespace gc
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700308} // namespace art
309
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700310#endif // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_