/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include "base/mutex.h"
#include "gc_allocator.h"
#include "globals.h"
#include "mem_map.h"
#include "object_callbacks.h"
#include "UniquePtr.h"

#include <limits.h>
#include <set>
#include <stdint.h>
#include <vector>

namespace art {

namespace mirror {
  class Object;
}  // namespace mirror

namespace gc {
namespace accounting {

template<size_t kAlignment>
class SpaceBitmap {
 public:
  typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);

  typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  static SpaceBitmap* Create(const std::string& name, byte* heap_begin, size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
                                       byte* heap_begin, size_t heap_capacity);

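  // A minimal usage sketch (illustrative only; heap_begin_addr, heap_capacity and obj are assumed
  // to be supplied by the owning space):
  //
  //   SpaceBitmap<kObjectAlignment>* bitmap =
  //       SpaceBitmap<kObjectAlignment>::Create("live bitmap", heap_begin_addr, heap_capacity);
  //   bitmap->Set(obj);          // Mark obj.
  //   CHECK(bitmap->Test(obj));  // The bit is now set.
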
  ~SpaceBitmap() {
  }

  // <offset> is the difference from heap_begin_ to a pointer address.
  // <index> is the index of the word in bitmap_begin_ that contains the bit representing
  // <offset>.
  static size_t OffsetToIndex(size_t offset) ALWAYS_INLINE {
    return offset / kAlignment / kBitsPerWord;
  }

  static uintptr_t IndexToOffset(size_t index) ALWAYS_INLINE {
    return static_cast<uintptr_t>(index * kAlignment * kBitsPerWord);
  }

  // Bits are packed so that the object at heap offset <offset> maps to bit
  // (offset / kAlignment) % kBitsPerWord of word (offset / kAlignment) / kBitsPerWord.
  static uword OffsetToMask(uintptr_t offset) ALWAYS_INLINE {
    return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerWord);
  }

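  // Worked example (illustrative, assuming kAlignment == 8 and 32-bit words, i.e.
  // kBitsPerWord == 32): an object at heap offset 0x230 (560) corresponds to overall bit
  // number 560 / 8 == 70, which is bit 70 % 32 == 6 of word 70 / 32 == 2. So
  // OffsetToIndex(0x230) == 2, OffsetToMask(0x230) == 1 << 6 == 0x40, and
  // IndexToOffset(2) == 2 * 8 * 32 == 512 is the heap offset covered by the first bit of
  // that word.
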
  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes. Returns the bitmap's memory to the system as a side-effect.
  void Clear();

  bool Test(const mirror::Object* obj) const;

  // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
  // even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
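    // (Illustrative: with heap_begin_ == 0x70000000, an obj at 0x60000000 yields an offset that
    // wraps around to a huge value whose index exceeds bitmap_size_ / kWordSize, so the check
    // below fails as intended, with no separate lower-bound comparison needed.)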
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / kWordSize;
  }

  void VisitRange(uintptr_t base, uintptr_t max, ObjectCallback* callback, void* arg) const;

  class ClearVisitor {
   public:
    explicit ClearVisitor(SpaceBitmap* const bitmap)
        : bitmap_(bitmap) {
    }

    void operator()(mirror::Object* obj) const {
      bitmap_->Clear(obj);
    }
   private:
    SpaceBitmap* const bitmap_;
  };

  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }

  // Visit the live objects in the range [visit_begin, visit_end).
  // TODO: Use lock annotations when clang is fixed.
  // EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template <typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;

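  // A sketch of a visitor for VisitMarkedRange (illustrative only; CountVisitor is hypothetical
  // and not part of the runtime). The visitor may be any callable taking a mirror::Object*, in
  // the same style as ClearVisitor above:
  //
  //   struct CountVisitor {
  //     explicit CountVisitor(size_t* count) : count_(count) {}
  //     void operator()(mirror::Object* /*obj*/) const { ++*count_; }
  //     size_t* const count_;
  //   };
  //   size_t count = 0;
  //   CountVisitor visitor(&count);
  //   bitmap->VisitMarkedRange(bitmap->HeapBegin(), bitmap->HeapLimit(), visitor);
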
  // Visits set bits in address order. The callback is not permitted to change the bitmap bits or
  // max during the traversal.
  void Walk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Visits set bits with an in order traversal. The callback is not permitted to change the bitmap
  // bits or max during the traversal.
  void InOrderWalk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects. Call <callback> zero or more times with lists of these object
  // pointers. The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);

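  // A sketch of a matching SweepCallback (illustrative only; FreeGarbage, Allocator and its Free
  // routine are hypothetical, not runtime APIs). SweepWalk batches garbage pointers and hands
  // each batch to the callback:
  //
  //   void FreeGarbage(size_t ptr_count, mirror::Object** ptrs, void* arg) {
  //     Allocator* allocator = reinterpret_cast<Allocator*>(arg);  // Hypothetical context object.
  //     for (size_t i = 0; i < ptr_count; ++i) {
  //       allocator->Free(ptrs[i]);
  //     }
  //   }
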
  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  uword* Begin() {
    return bitmap_begin_;
  }

  // Size of our internal storage.
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmap spans.
  size_t HeapSize() const {
    return IndexToOffset(Size() / kWordSize);
  }

  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uintptr_t HeapLimit() const {
    return HeapBegin() + static_cast<uintptr_t>(HeapSize());
  }

  // Set the max address which can be covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const {
    return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
                        reinterpret_cast<void*>(HeapLimit()));
  }

  const void* GetObjectWordAddress(const mirror::Object* obj) const {
    uintptr_t addr = reinterpret_cast<uintptr_t>(obj);
    const uintptr_t offset = addr - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return &bitmap_begin_[index];
  }

 private:
  // TODO: heap_end_ is initialized so that the heap bitmap is empty; this doesn't require the -1,
  // but we document that this is expected on heap_end_.
  SpaceBitmap(const std::string& name, MemMap* mem_map, uword* bitmap_begin, size_t bitmap_size,
              const void* heap_begin);

  // Helper function for computing bitmap size based on a 64 bit capacity.
  static size_t ComputeBitmapSize(uint64_t capacity);

  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // For an unvisited object, visit it then all its children found via fields.
  static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj,
                                void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Walk instance fields of the given Class. Separate function to allow recursion on the super
  // class.
  static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
                                 mirror::Object* obj, mirror::Class* klass, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Backing storage for bitmap.
  UniquePtr<MemMap> mem_map_;

  // This bitmap itself, word sized for efficiency in scanning.
  uword* const bitmap_begin_;

  // Size of this bitmap.
  size_t bitmap_size_;

  // The base address of the heap, which corresponds to the word containing the first bit in the
  // bitmap.
  const uintptr_t heap_begin_;

  // Name of this bitmap.
  std::string name_;
};

typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_