/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include <limits.h>
#include <stdint.h>
#include <memory>
#include <set>
#include <vector>

#include "base/mutex.h"
#include "gc_allocator.h"
#include "globals.h"
#include "object_callbacks.h"

namespace art {

namespace mirror {
  class Object;
}  // namespace mirror
class MemMap;

namespace gc {
namespace accounting {

template<size_t kAlignment>
class SpaceBitmap {
 public:
  typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);
  typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  static SpaceBitmap* Create(const std::string& name, byte* heap_begin, size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
                                       byte* heap_begin, size_t heap_capacity);

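  // Illustrative usage sketch (not part of the original header): creating a live bitmap for a
  // continuous space. "heap_begin" and "heap_capacity" are assumed caller-supplied values, and
  // ContinuousSpaceBitmap is the kObjectAlignment instantiation declared at the end of this file.
  //
  //   std::unique_ptr<ContinuousSpaceBitmap> live_bitmap(
  //       ContinuousSpaceBitmap::Create("live bitmap", heap_begin, heap_capacity));
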
  ~SpaceBitmap();

  // <offset> is the difference from heap_begin_ to a pointer address.
  // <index> is the index of the bitmap word (in bitmap_begin_) that contains the bit representing
  // <offset>.
  static constexpr size_t OffsetToIndex(size_t offset) {
    return offset / kAlignment / kBitsPerWord;
  }

  template<typename T>
  static constexpr T IndexToOffset(T index) {
    return static_cast<T>(index * kAlignment * kBitsPerWord);
  }

  // Bits are packed in the obvious way.
  static constexpr uword OffsetToMask(uintptr_t offset) {
    return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerWord);
  }

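  // Worked example of the mapping above (illustrative only; assumes kAlignment == 8 and 64-bit
  // words, i.e. kBitsPerWord == 64): an object at heap_begin_ + 640 has offset 640, so
  // OffsetToIndex(640) == 640 / 8 / 64 == 1, OffsetToMask(640) == 1 << ((640 / 8) % 64) == 1 << 16,
  // and IndexToOffset(1) == 1 * 8 * 64 == 512, the offset of the first byte covered by word 1.
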
  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes. Returns the bitmap's memory to the system as a side-effect.
  void Clear();

  bool Test(const mirror::Object* obj) const;

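  // Minimal marking sketch (illustrative, not from the original sources); assumes "obj" lies in
  // [HeapBegin(), HeapLimit()) and that the caller holds whatever locks the collector requires:
  //
  //   if (!bitmap->Test(obj)) {
  //     bitmap->Set(obj);  // Mark obj as live.
  //   }
  //   bool was_marked = bitmap->AtomicTestAndSet(obj);  // Thread-safe variant of the same idea.
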
  // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
  // even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / kWordSize;
  }

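  // Concrete instance of the underflow trick above (illustrative, hypothetical addresses): with
  // heap_begin_ == 0x70000000 and obj == 0x60000000, offset wraps around to a value near
  // UINTPTR_MAX, OffsetToIndex(offset) is far larger than bitmap_size_ / kWordSize, and
  // HasAddress() returns false without needing a separate obj < heap_begin_ check.
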
  void VisitRange(uintptr_t base, uintptr_t max, ObjectCallback* callback, void* arg) const;

  class ClearVisitor {
   public:
    explicit ClearVisitor(SpaceBitmap* const bitmap)
        : bitmap_(bitmap) {
    }

    void operator()(mirror::Object* obj) const {
      bitmap_->Clear(obj);
    }
   private:
    SpaceBitmap* const bitmap_;
  };

  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }

  // Visit the live objects in the range [visit_begin, visit_end).
  // TODO: Use lock annotations when clang is fixed.
  // EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template <typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;

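  // Illustrative call (not part of the original header): visiting every marked object in a range
  // using a C++11 lambda as the Visitor; "range_begin" and "range_end" are assumed caller-supplied
  // addresses within [HeapBegin(), HeapLimit()).
  //
  //   bitmap->VisitMarkedRange(range_begin, range_end, [](mirror::Object* obj) {
  //     // Process one object whose bit is set.
  //   });
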
  // Visits set bits in address order. The callback is not permitted to change the bitmap bits or
  // max during the traversal.
  void Walk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
  // bits or max during the traversal.
  void InOrderWalk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects. Call <callback> zero or more times with lists of these object
  // pointers. The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);

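  // Hypothetical SweepCallback sketch (illustrative, not from the original sources): SweepWalk
  // hands it batches of pointers whose bits are set in "live" but not in "mark", and the callback
  // returns them to whatever allocator "arg" identifies.
  //
  //   void SweepGarbageCallback(size_t ptr_count, mirror::Object** ptrs, void* arg) {
  //     for (size_t i = 0; i < ptr_count; ++i) {
  //       // Free ptrs[i] using the allocator passed through arg.
  //     }
  //   }
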
  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  uword* Begin() {
    return bitmap_begin_;
  }

  // Size of our internal storage
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmap spans.
  uint64_t HeapSize() const {
    return IndexToOffset<uint64_t>(Size() / kWordSize);
  }

  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uint64_t HeapLimit() const {
    return static_cast<uint64_t>(HeapBegin()) + HeapSize();
  }

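  // Worked example (illustrative; assumes kAlignment == 8 and 64-bit words): covering a 64 MiB
  // heap needs 64 MiB / 8 == 8 Mi bits, i.e. bitmap_size_ == 1 MiB of storage. Then
  // HeapSize() == IndexToOffset<uint64_t>(1 MiB / kWordSize) == 131072 * 8 * 64 == 64 MiB,
  // and HeapLimit() == HeapBegin() + 64 MiB.
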
  // Set the max address which can be covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const;

  const void* GetObjectWordAddress(const mirror::Object* obj) const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
    const uintptr_t offset = addr - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return &bitmap_begin_[index];
  }

Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700193 private:
194 // TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
195 // however, we document that this is expected on heap_end_
Andreas Gampecb8aea42014-04-02 15:39:58 -0700196 SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin, size_t bitmap_size,
Mathieu Chartierbbd695c2014-04-16 09:48:48 -0700197 const void* heap_begin);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700198
  // Helper function for computing bitmap size based on a 64 bit capacity.
  static size_t ComputeBitmapSize(uint64_t capacity);

  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // For an unvisited object, visit it then all its children found via fields.
  static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj,
                                void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Walk instance fields of the given Class. Separate function to allow recursion on the super
  // class.
  static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
                                 mirror::Object* obj, mirror::Class* klass, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Backing storage for bitmap.
  std::unique_ptr<MemMap> mem_map_;

  // This bitmap itself, word sized for efficiency in scanning.
  uword* const bitmap_begin_;

  // Size of this bitmap.
  size_t bitmap_size_;

  // The base address of the heap, which corresponds to the word containing the first bit in the
  // bitmap.
  const uintptr_t heap_begin_;

  // Name of this bitmap.
  std::string name_;
};

typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_