/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_queue.h"

#include "accounting/card_table-inl.h"
#include "collector/concurrent_copying.h"
#include "heap.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/reference-inl.h"

namespace art {
namespace gc {

ReferenceQueue::ReferenceQueue(Mutex* lock) : lock_(lock), list_(nullptr) {
}

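// Thread-safe variant of EnqueuePendingReference: takes lock_ and only enqueues the reference if
// it is not already on a queue.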
void ReferenceQueue::AtomicEnqueueIfNotEnqueued(Thread* self, mirror::Reference* ref) {
  DCHECK(ref != nullptr);
  MutexLock mu(self, *lock_);
  if (!ref->IsEnqueued()) {
    EnqueuePendingReference(ref);
  }
}

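// Enqueue a reference whose caller guarantees it is enqueuable; the CHECK enforces this even in
// non-debug builds.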
void ReferenceQueue::EnqueueReference(mirror::Reference* ref) {
  CHECK(ref->IsEnqueuable());
  EnqueuePendingReference(ref);
}

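// Splice |ref| into the cyclic singly linked list threaded through Reference.pendingNext. list_
// keeps pointing at the first reference enqueued; new references are inserted directly after it.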
void ReferenceQueue::EnqueuePendingReference(mirror::Reference* ref) {
  DCHECK(ref != nullptr);
  if (IsEmpty()) {
    // 1 element cyclic queue, i.e.: Reference ref = ..; ref.pendingNext = ref;
    list_ = ref;
  } else {
    mirror::Reference* head = list_->GetPendingNext();
    if (Runtime::Current()->IsActiveTransaction()) {
      ref->SetPendingNext<true>(head);
    } else {
      ref->SetPendingNext<false>(head);
    }
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    list_->SetPendingNext<true>(ref);
  } else {
    list_->SetPendingNext<false>(ref);
  }
}

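// Unlink and return the reference immediately after list_ (the most recently enqueued one) and
// null out its pendingNext. When the concurrent copying collector is active, also fix up the read
// barrier state that ConcurrentCopying::ProcessMarkStackRef() left on the reference.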
mirror::Reference* ReferenceQueue::DequeuePendingReference() {
  DCHECK(!IsEmpty());
  mirror::Reference* head = list_->GetPendingNext();
  DCHECK(head != nullptr);
  mirror::Reference* ref;
  // Note: the following code is thread-safe because it is only called from ProcessReferences which
  // is single threaded.
  if (list_ == head) {
    ref = list_;
    list_ = nullptr;
  } else {
    mirror::Reference* next = head->GetPendingNext();
    if (Runtime::Current()->IsActiveTransaction()) {
      list_->SetPendingNext<true>(next);
    } else {
      list_->SetPendingNext<false>(next);
    }
    ref = head;
  }
  if (Runtime::Current()->IsActiveTransaction()) {
    ref->SetPendingNext<true>(nullptr);
  } else {
    ref->SetPendingNext<false>(nullptr);
  }
  Heap* heap = Runtime::Current()->GetHeap();
  if (kUseBakerOrBrooksReadBarrier && heap->CurrentCollectorType() == kCollectorTypeCC &&
      heap->ConcurrentCopyingCollector()->IsActive()) {
    // Change the gray ptr we left in ConcurrentCopying::ProcessMarkStackRef() to black or white.
    // We check IsActive() above because we don't want to do this when the zygote compaction
    // collector (SemiSpace) is running.
    CHECK(ref != nullptr);
    collector::ConcurrentCopying* concurrent_copying = heap->ConcurrentCopyingCollector();
    const bool is_moving = concurrent_copying->RegionSpace()->IsInToSpace(ref);
    if (ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr()) {
      if (is_moving) {
        ref->AtomicSetReadBarrierPointer(ReadBarrier::GrayPtr(), ReadBarrier::WhitePtr());
        CHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr());
      } else {
        ref->AtomicSetReadBarrierPointer(ReadBarrier::GrayPtr(), ReadBarrier::BlackPtr());
        CHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::BlackPtr());
      }
    } else {
      // In ConcurrentCopying::ProcessMarkStackRef() we may leave a black or white Reference in the
      // queue and find it here, which is OK. Check that the color makes sense depending on whether
      // the Reference is moving or not and that the referent has been marked.
      if (is_moving) {
        CHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr())
            << "ref=" << ref << " rb_ptr=" << ref->GetReadBarrierPointer();
      } else {
        CHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::BlackPtr())
            << "ref=" << ref << " rb_ptr=" << ref->GetReadBarrierPointer();
      }
      mirror::Object* referent = ref->GetReferent<kWithoutReadBarrier>();
      CHECK(referent != nullptr) << "Reference should not have been enqueued if referent is null";
      CHECK(concurrent_copying->IsInToSpace(referent))
          << "ref=" << ref << " rb_ptr=" << ref->GetReadBarrierPointer()
          << " referent=" << referent;
    }
  }
  return ref;
}

void ReferenceQueue::Dump(std::ostream& os) const {
  mirror::Reference* cur = list_;
  os << "Reference starting at list_=" << list_ << "\n";
  if (cur == nullptr) {
    return;
  }
  do {
    mirror::Reference* pending_next = cur->GetPendingNext();
    os << "Reference= " << cur << " PendingNext=" << pending_next;
    if (cur->IsFinalizerReferenceInstance()) {
      os << " Zombie=" << cur->AsFinalizerReference()->GetZombie();
    }
    os << "\n";
    cur = pending_next;
  } while (cur != list_);
}

size_t ReferenceQueue::GetLength() const {
  size_t count = 0;
  mirror::Reference* cur = list_;
  if (cur != nullptr) {
    do {
      ++count;
      cur = cur->GetPendingNext();
    } while (cur != list_);
  }
  return count;
}

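// Drain the queue, clearing the referent of every reference whose referent is still unmarked
// (white) and moving such references to |cleared_references| if they are enqueuable.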
void ReferenceQueue::ClearWhiteReferences(ReferenceQueue* cleared_references,
                                          collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    mirror::Reference* ref = DequeuePendingReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr &&
        !collector->IsMarkedHeapReference(referent_addr)) {
      // Referent is white, clear it.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->ClearReferent<true>();
      } else {
        ref->ClearReferent<false>();
      }
      if (ref->IsEnqueuable()) {
        cleared_references->EnqueuePendingReference(ref);
      }
    }
  }
}

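// Drain the queue of finalizer references. For each one whose referent is unmarked, mark the
// referent (keeping it alive for finalization), stash its forwarded address in the zombie field,
// clear the referent, and enqueue the reference on |cleared_references|.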
void ReferenceQueue::EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                                collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    mirror::FinalizerReference* ref = DequeuePendingReference()->AsFinalizerReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr &&
        !collector->IsMarkedHeapReference(referent_addr)) {
      mirror::Object* forward_address = collector->MarkObject(referent_addr->AsMirrorPtr());
      // If the referent is non-null the reference must be enqueuable.
      DCHECK(ref->IsEnqueuable());
      // Move the updated referent to the zombie field.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->SetZombie<true>(forward_address);
        ref->ClearReferent<true>();
      } else {
        ref->SetZombie<false>(forward_address);
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
  }
}

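// Walk the cyclic list once and mark every non-null referent through |visitor| without clearing
// it, so the referents of the queued soft references stay reachable.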
void ReferenceQueue::ForwardSoftReferences(MarkObjectVisitor* visitor) {
  if (UNLIKELY(IsEmpty())) {
    return;
  }
  mirror::Reference* const head = list_;
  mirror::Reference* ref = head;
  do {
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr) {
      visitor->MarkHeapReference(referent_addr);
    }
    ref = ref->GetPendingNext();
  } while (LIKELY(ref != head));
}

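// Update list_ to the possibly moved location of its reference, as reported by
// |visitor|->IsMarked(), so the queue head stays valid across a moving collection.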
void ReferenceQueue::UpdateRoots(IsMarkedVisitor* visitor) {
  if (list_ != nullptr) {
    list_ = down_cast<mirror::Reference*>(visitor->IsMarked(list_));
  }
}

}  // namespace gc
}  // namespace art