/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include <limits.h>
#include <stdint.h>
#include <memory>
#include <set>
#include <vector>

#include "base/mutex.h"
#include "gc_allocator.h"
#include "globals.h"
#include "object_callbacks.h"

namespace art {

namespace mirror {
  class Object;
}  // namespace mirror
class MemMap;

namespace gc {
namespace accounting {

template<size_t kAlignment>
class SpaceBitmap {
 public:
  typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);

  typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  static SpaceBitmap* Create(const std::string& name, byte* heap_begin, size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
                                       byte* heap_begin, size_t heap_capacity);
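  //
  // Example usage (an illustrative sketch, not part of this header's API): creating a bitmap for
  // a 64 MB heap of kObjectAlignment-aligned objects. "heap_base" is a hypothetical byte* pointing
  // at the start of the covered range.
  //
  //   ContinuousSpaceBitmap* live_bitmap =
  //       ContinuousSpaceBitmap::Create("example live bitmap", heap_base, 64 * 1024 * 1024);
  //   CHECK(live_bitmap != nullptr);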

  ~SpaceBitmap() {
  }

  // <offset> is the difference from heap_begin_ to a pointer address.
  // <index> is the index of the word in bitmap_begin_ that contains the bit representing
  // <offset>.
  static size_t OffsetToIndex(size_t offset) ALWAYS_INLINE {
    return offset / kAlignment / kBitsPerWord;
  }

  template<typename T>
  static T IndexToOffset(T index) {
    return static_cast<T>(index * kAlignment * kBitsPerWord);
  }

  // Bits are packed in the obvious way.
  static uword OffsetToMask(uintptr_t offset) ALWAYS_INLINE {
    return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerWord);
  }
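  //
  // Worked example of the offset <-> index/mask mapping above (illustrative; assumes
  // kAlignment == 8 and kBitsPerWord == 32, both of which depend on the build):
  //   offset 0x140 (320 bytes past heap_begin_) corresponds to bit 320 / 8 == 40, so
  //   OffsetToIndex(0x140) == 40 / 32 == 1 (the second bitmap word),
  //   OffsetToMask(0x140) == 1 << (40 % 32) == 1 << 8, and
  //   IndexToOffset(1) == 1 * 8 * 32 == 256, the heap offset covered by that word's first bit.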

  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes.  Returns the bitmap's memory to the system as a side-effect.
  void Clear();

  bool Test(const mirror::Object* obj) const;

  // Return true iff <obj> is within the range of pointers that this bitmap could potentially
  // cover, even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / kWordSize;
  }
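  //
  // Example usage (an illustrative sketch): marking an object and querying it later. "obj" is a
  // hypothetical mirror::Object* lying inside the covered heap range.
  //
  //   if (bitmap->HasAddress(obj)) {
  //     bitmap->Set(obj);
  //   }
  //   ...
  //   bool is_marked = bitmap->Test(obj);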

  void VisitRange(uintptr_t base, uintptr_t max, ObjectCallback* callback, void* arg) const;

  class ClearVisitor {
   public:
    explicit ClearVisitor(SpaceBitmap* const bitmap)
        : bitmap_(bitmap) {
    }

    void operator()(mirror::Object* obj) const {
      bitmap_->Clear(obj);
    }
   private:
    SpaceBitmap* const bitmap_;
  };

  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }

  // Visit the live objects in the range [visit_begin, visit_end).
  // TODO: Use lock annotations when clang is fixed.
  // EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template <typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;
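  //
  // Example usage (an illustrative sketch): counting the marked objects over the bitmap's full
  // range with a lambda visitor. Any callable taking a mirror::Object* can serve as the Visitor.
  //
  //   size_t count = 0;
  //   bitmap->VisitMarkedRange(bitmap->HeapBegin(),
  //                            static_cast<uintptr_t>(bitmap->HeapLimit()),
  //                            [&count](mirror::Object* /* obj */) { ++count; });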

  // Visits set bits in address order.  The callback is not permitted to change the bitmap bits or
  // max during the traversal.
  void Walk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Visits set bits with an in-order traversal.  The callback is not permitted to change the
  // bitmap bits or max during the traversal.
  void InOrderWalk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects.  Call <callback> zero or more times with lists of these object
  // pointers.  The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);
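  //
  // Example SweepCallback (an illustrative sketch, not ART's sweep code): records the garbage
  // objects it is handed. "SweepContext" is a hypothetical struct threaded through "arg".
  //
  //   void RecordSweptCallback(size_t ptr_count, mirror::Object** ptrs, void* arg) {
  //     SweepContext* context = reinterpret_cast<SweepContext*>(arg);
  //     for (size_t i = 0; i < ptr_count; ++i) {
  //       context->swept.push_back(ptrs[i]);
  //     }
  //   }
  //   ...
  //   ContinuousSpaceBitmap::SweepWalk(*live_bitmap, *mark_bitmap, sweep_begin, sweep_end,
  //                                    RecordSweptCallback, &context);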

  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  uword* Begin() {
    return bitmap_begin_;
  }

  // Size of our internal storage.
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmap spans.
  uint64_t HeapSize() const {
    return IndexToOffset<uint64_t>(Size() / kWordSize);
  }

  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uint64_t HeapLimit() const {
    return static_cast<uint64_t>(HeapBegin()) + HeapSize();
  }
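  //
  // Worked example (illustrative; assumes kAlignment == 8, kBitsPerWord == 32 and
  // kWordSize == 4): each bitmap word covers 32 * 8 == 256 bytes of heap, so a bitmap with
  // Size() == 8192 bytes (2048 words) has HeapSize() == 2048 * 256 == 512 KB and
  // HeapLimit() == HeapBegin() + 512 * 1024.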

  // Set the max address which can be covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const;

  const void* GetObjectWordAddress(const mirror::Object* obj) const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
    const uintptr_t offset = addr - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return &bitmap_begin_[index];
  }

 private:
  // TODO: heap_end_ is initialized so that the heap bitmap is empty; this doesn't require the -1,
  // but we document that this is expected on heap_end_.
  SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin, size_t bitmap_size,
              const void* heap_begin);

  // Helper function for computing bitmap size based on a 64 bit capacity.
  static size_t ComputeBitmapSize(uint64_t capacity);

  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // For an unvisited object, visit it then all its children found via fields.
  static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj,
                                void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Walk instance fields of the given Class. Separate function to allow recursion on the super
  // class.
  static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
                                 mirror::Object* obj, mirror::Class* klass, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Backing storage for bitmap.
  std::unique_ptr<MemMap> mem_map_;

  // This bitmap itself, word sized for efficiency in scanning.
  uword* const bitmap_begin_;

  // Size of this bitmap.
  size_t bitmap_size_;

  // The base address of the heap, which corresponds to the word containing the first bit in the
  // bitmap.
  const uintptr_t heap_begin_;

  // Name of this bitmap.
  std::string name_;
};

typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_