blob: 5224d64efcb7cf62f14242bf7847fc795e7bb8df [file] [log] [blame]
Mathieu Chartierd8195f12012-10-05 12:21:28 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_GC_ACCOUNTING_ATOMIC_STACK_H_
18#define ART_RUNTIME_GC_ACCOUNTING_ATOMIC_STACK_H_
Mathieu Chartierd8195f12012-10-05 12:21:28 -070019
Brian Carlstroma2806552014-02-27 12:29:32 -080020#include <algorithm>
Ian Rogers700a4022014-05-19 16:49:03 -070021#include <memory>
Mathieu Chartierd8195f12012-10-05 12:21:28 -070022#include <string>
23
Ian Rogersef7d42f2014-01-06 12:55:46 -080024#include "atomic.h"
Elliott Hughes07ed66b2012-12-12 18:34:25 -080025#include "base/logging.h"
Elliott Hughes76160052012-12-12 16:31:20 -080026#include "base/macros.h"
Mathieu Chartierd8195f12012-10-05 12:21:28 -070027#include "mem_map.h"
Mathieu Chartiercb535da2015-01-23 13:50:03 -080028#include "stack.h"
Mathieu Chartierd8195f12012-10-05 12:21:28 -070029#include "utils.h"
30
31namespace art {
Ian Rogers1d54e732013-05-02 21:10:01 -070032namespace gc {
33namespace accounting {
Mathieu Chartierd8195f12012-10-05 12:21:28 -070034
// Internal representation is StackReference<T>, so this only works with mirror::Object or its
// subclasses.
Mathieu Chartierd8195f12012-10-05 12:21:28 -070037template <typename T>
38class AtomicStack {
39 public:
Mathieu Chartiercb535da2015-01-23 13:50:03 -080040 class ObjectComparator {
41 public:
42 // These two comparators are for std::binary_search.
43 bool operator()(const T* a, const StackReference<T>& b) const NO_THREAD_SAFETY_ANALYSIS {
44 return a < b.AsMirrorPtr();
45 }
46 bool operator()(const StackReference<T>& a, const T* b) const NO_THREAD_SAFETY_ANALYSIS {
47 return a.AsMirrorPtr() < b;
48 }
49 // This comparator is for std::sort.
50 bool operator()(const StackReference<T>& a, const StackReference<T>& b) const
51 NO_THREAD_SAFETY_ANALYSIS {
52 return a.AsMirrorPtr() < b.AsMirrorPtr();
53 }
54 };
55
Mathieu Chartierd8195f12012-10-05 12:21:28 -070056 // Capacity is how many elements we can store in the stack.
Mathieu Chartierc1790162014-05-23 10:54:50 -070057 static AtomicStack* Create(const std::string& name, size_t growth_limit, size_t capacity) {
58 std::unique_ptr<AtomicStack> mark_stack(new AtomicStack(name, growth_limit, capacity));
Mathieu Chartierd8195f12012-10-05 12:21:28 -070059 mark_stack->Init();
60 return mark_stack.release();
61 }
62
Ian Rogers1d54e732013-05-02 21:10:01 -070063 ~AtomicStack() {}
Mathieu Chartierd8195f12012-10-05 12:21:28 -070064
65 void Reset() {
Mathieu Chartierc1790162014-05-23 10:54:50 -070066 DCHECK(mem_map_.get() != nullptr);
Mathieu Chartiercb535da2015-01-23 13:50:03 -080067 DCHECK(begin_ != nullptr);
Ian Rogers3e5cf302014-05-20 16:40:37 -070068 front_index_.StoreRelaxed(0);
69 back_index_.StoreRelaxed(0);
Mathieu Chartierf082d3c2013-07-29 17:04:07 -070070 debug_is_sorted_ = true;
Ian Rogersc5f17732014-06-05 20:48:42 -070071 mem_map_->MadviseDontNeedAndZero();
Mathieu Chartierd8195f12012-10-05 12:21:28 -070072 }
73
74 // Beware: Mixing atomic pushes and atomic pops will cause ABA problem.
75
76 // Returns false if we overflowed the stack.
Mathieu Chartiercb535da2015-01-23 13:50:03 -080077 bool AtomicPushBackIgnoreGrowthLimit(T* value) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierc1790162014-05-23 10:54:50 -070078 return AtomicPushBackInternal(value, capacity_);
79 }
80
81 // Returns false if we overflowed the stack.
Mathieu Chartiercb535da2015-01-23 13:50:03 -080082 bool AtomicPushBack(T* value) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierc1790162014-05-23 10:54:50 -070083 return AtomicPushBackInternal(value, growth_limit_);
Mathieu Chartierd8195f12012-10-05 12:21:28 -070084 }
85
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -080086 // Atomically bump the back index by the given number of
87 // slots. Returns false if we overflowed the stack.
Mathieu Chartiercb535da2015-01-23 13:50:03 -080088 bool AtomicBumpBack(size_t num_slots, StackReference<T>** start_address,
89 StackReference<T>** end_address)
90 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -080091 if (kIsDebugBuild) {
92 debug_is_sorted_ = false;
93 }
94 int32_t index;
95 int32_t new_index;
96 do {
Ian Rogers3e5cf302014-05-20 16:40:37 -070097 index = back_index_.LoadRelaxed();
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -080098 new_index = index + num_slots;
Mathieu Chartierc1790162014-05-23 10:54:50 -070099 if (UNLIKELY(static_cast<size_t>(new_index) >= growth_limit_)) {
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -0800100 // Stack overflow.
101 return false;
102 }
Ian Rogers3e5cf302014-05-20 16:40:37 -0700103 } while (!back_index_.CompareExchangeWeakRelaxed(index, new_index));
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800104 *start_address = begin_ + index;
105 *end_address = begin_ + new_index;
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -0800106 if (kIsDebugBuild) {
107 // Sanity check that the memory is zero.
108 for (int32_t i = index; i < new_index; ++i) {
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800109 DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr))
Brian Carlstroma2806552014-02-27 12:29:32 -0800110 << "i=" << i << " index=" << index << " new_index=" << new_index;
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -0800111 }
112 }
113 return true;
114 }
115
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800116 void AssertAllZero() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -0800117 if (kIsDebugBuild) {
118 for (size_t i = 0; i < capacity_; ++i) {
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800119 DCHECK_EQ(begin_[i].AsMirrorPtr(), static_cast<T*>(nullptr)) << "i=" << i;
Hiroshi Yamauchif5b0e202014-02-11 17:02:22 -0800120 }
121 }
122 }
123
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800124 void PushBack(T* value) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700125 if (kIsDebugBuild) {
126 debug_is_sorted_ = false;
127 }
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800128 const int32_t index = back_index_.LoadRelaxed();
Mathieu Chartierc1790162014-05-23 10:54:50 -0700129 DCHECK_LT(static_cast<size_t>(index), growth_limit_);
Ian Rogers3e5cf302014-05-20 16:40:37 -0700130 back_index_.StoreRelaxed(index + 1);
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800131 begin_[index].Assign(value);
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700132 }
133
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800134 T* PopBack() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers3e5cf302014-05-20 16:40:37 -0700135 DCHECK_GT(back_index_.LoadRelaxed(), front_index_.LoadRelaxed());
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700136 // Decrement the back index non atomically.
Ian Rogers3e5cf302014-05-20 16:40:37 -0700137 back_index_.StoreRelaxed(back_index_.LoadRelaxed() - 1);
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800138 return begin_[back_index_.LoadRelaxed()].AsMirrorPtr();
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700139 }
140
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700141 // Take an item from the front of the stack.
142 T PopFront() {
Ian Rogers3e5cf302014-05-20 16:40:37 -0700143 int32_t index = front_index_.LoadRelaxed();
144 DCHECK_LT(index, back_index_.LoadRelaxed());
145 front_index_.StoreRelaxed(index + 1);
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700146 return begin_[index];
147 }
148
Mathieu Chartier94c32c52013-08-09 11:14:04 -0700149 // Pop a number of elements.
150 void PopBackCount(int32_t n) {
151 DCHECK_GE(Size(), static_cast<size_t>(n));
Ian Rogers3e5cf302014-05-20 16:40:37 -0700152 back_index_.FetchAndSubSequentiallyConsistent(n);
Mathieu Chartier94c32c52013-08-09 11:14:04 -0700153 }
154
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700155 bool IsEmpty() const {
156 return Size() == 0;
157 }
158
159 size_t Size() const {
Ian Rogers3e5cf302014-05-20 16:40:37 -0700160 DCHECK_LE(front_index_.LoadRelaxed(), back_index_.LoadRelaxed());
161 return back_index_.LoadRelaxed() - front_index_.LoadRelaxed();
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700162 }
163
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800164 StackReference<T>* Begin() const {
165 return begin_ + front_index_.LoadRelaxed();
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700166 }
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800167 StackReference<T>* End() const {
168 return begin_ + back_index_.LoadRelaxed();
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700169 }
170
171 size_t Capacity() const {
172 return capacity_;
173 }
174
175 // Will clear the stack.
176 void Resize(size_t new_capacity) {
177 capacity_ = new_capacity;
Mathieu Chartierc1790162014-05-23 10:54:50 -0700178 growth_limit_ = new_capacity;
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700179 Init();
180 }
181
Ian Rogers1d54e732013-05-02 21:10:01 -0700182 void Sort() {
Ian Rogers3e5cf302014-05-20 16:40:37 -0700183 int32_t start_back_index = back_index_.LoadRelaxed();
184 int32_t start_front_index = front_index_.LoadRelaxed();
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800185 std::sort(Begin(), End(), ObjectComparator());
Ian Rogers3e5cf302014-05-20 16:40:37 -0700186 CHECK_EQ(start_back_index, back_index_.LoadRelaxed());
187 CHECK_EQ(start_front_index, front_index_.LoadRelaxed());
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700188 if (kIsDebugBuild) {
189 debug_is_sorted_ = true;
Ian Rogers1d54e732013-05-02 21:10:01 -0700190 }
191 }
192
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800193 bool ContainsSorted(const T* value) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700194 DCHECK(debug_is_sorted_);
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800195 return std::binary_search(Begin(), End(), value, ObjectComparator());
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700196 }
197
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800198 bool Contains(const T* value) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
199 for (auto cur = Begin(), end = End(); cur != end; ++cur) {
200 if (cur->AsMirrorPtr() == value) {
201 return true;
202 }
203 }
204 return false;
Ian Rogers1d54e732013-05-02 21:10:01 -0700205 }
206
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700207 private:
Mathieu Chartierc1790162014-05-23 10:54:50 -0700208 AtomicStack(const std::string& name, size_t growth_limit, size_t capacity)
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700209 : name_(name),
210 back_index_(0),
211 front_index_(0),
Mathieu Chartierc1790162014-05-23 10:54:50 -0700212 begin_(nullptr),
213 growth_limit_(growth_limit),
Ian Rogers1d54e732013-05-02 21:10:01 -0700214 capacity_(capacity),
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700215 debug_is_sorted_(true) {
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700216 }
217
Mathieu Chartierc1790162014-05-23 10:54:50 -0700218 // Returns false if we overflowed the stack.
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800219 bool AtomicPushBackInternal(T* value, size_t limit) ALWAYS_INLINE
220 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartierc1790162014-05-23 10:54:50 -0700221 if (kIsDebugBuild) {
222 debug_is_sorted_ = false;
223 }
224 int32_t index;
225 do {
226 index = back_index_.LoadRelaxed();
227 if (UNLIKELY(static_cast<size_t>(index) >= limit)) {
228 // Stack overflow.
229 return false;
230 }
231 } while (!back_index_.CompareExchangeWeakRelaxed(index, index + 1));
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800232 begin_[index].Assign(value);
Mathieu Chartierc1790162014-05-23 10:54:50 -0700233 return true;
234 }
235
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700236 // Size in number of elements.
237 void Init() {
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700238 std::string error_msg;
Vladimir Marko5c42c292015-02-25 12:02:49 +0000239 mem_map_.reset(MemMap::MapAnonymous(name_.c_str(), nullptr, capacity_ * sizeof(begin_[0]),
240 PROT_READ | PROT_WRITE, false, false, &error_msg));
Ian Rogers8d31bbd2013-10-13 10:44:14 -0700241 CHECK(mem_map_.get() != NULL) << "couldn't allocate mark stack.\n" << error_msg;
Ian Rogers13735952014-10-08 12:43:28 -0700242 uint8_t* addr = mem_map_->Begin();
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700243 CHECK(addr != NULL);
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700244 debug_is_sorted_ = true;
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800245 begin_ = reinterpret_cast<StackReference<T>*>(addr);
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700246 Reset();
247 }
248
249 // Name of the mark stack.
250 std::string name_;
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700251 // Memory mapping of the atomic stack.
Ian Rogers700a4022014-05-19 16:49:03 -0700252 std::unique_ptr<MemMap> mem_map_;
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700253 // Back index (index after the last element pushed).
254 AtomicInteger back_index_;
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700255 // Front index, used for implementing PopFront.
256 AtomicInteger front_index_;
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700257 // Base of the atomic stack.
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800258 StackReference<T>* begin_;
Mathieu Chartierc1790162014-05-23 10:54:50 -0700259 // Current maximum which we can push back to, must be <= capacity_.
260 size_t growth_limit_;
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700261 // Maximum number of elements.
262 size_t capacity_;
Mathieu Chartierf082d3c2013-07-29 17:04:07 -0700263 // Whether or not the stack is sorted, only updated in debug mode to avoid performance overhead.
264 bool debug_is_sorted_;
Ian Rogers1d54e732013-05-02 21:10:01 -0700265
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700266 DISALLOW_COPY_AND_ASSIGN(AtomicStack);
267};
268
Mathieu Chartiercb535da2015-01-23 13:50:03 -0800269typedef AtomicStack<mirror::Object> ObjectStack;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800270
Ian Rogers1d54e732013-05-02 21:10:01 -0700271} // namespace accounting
272} // namespace gc
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700273} // namespace art
274
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700275#endif // ART_RUNTIME_GC_ACCOUNTING_ATOMIC_STACK_H_