/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_queue.h"

#include "accounting/card_table-inl.h"
#include "collector/concurrent_copying.h"
#include "heap.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/reference-inl.h"
#include "object_callbacks.h"

namespace art {
namespace gc {

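// A ReferenceQueue is a circular, singly linked list of java.lang.ref.Reference
// objects, threaded through their pendingNext fields. list_ points at one element
// of the cycle; EnqueueReference() links new references in immediately after it,
// and DequeuePendingReference() unlinks the element immediately after it. A null
// pendingNext marks a reference as not yet enqueued (IsUnprocessed()).
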
ReferenceQueue::ReferenceQueue(Mutex* lock) : lock_(lock), list_(nullptr) {
}

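// Serializes concurrent enqueue attempts under lock_ and skips references that
// are already on some queue, so racing callers cannot double-link a reference.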
void ReferenceQueue::AtomicEnqueueIfNotEnqueued(Thread* self, ObjPtr<mirror::Reference> ref) {
  DCHECK(ref != nullptr);
  MutexLock mu(self, *lock_);
  if (ref->IsUnprocessed()) {
    EnqueueReference(ref);
  }
}

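// A sketch (illustration only, not part of the contract) of the pointer updates
// performed below, starting from the one-element cycle A -> A with list_ == A:
//   EnqueueReference(B): B.pendingNext = A; A.pendingNext = B;  // A -> B -> A
//   EnqueueReference(C): C.pendingNext = B; A.pendingNext = C;  // A -> C -> B -> A
// list_ stays on the first element enqueued, and DequeuePendingReference() always
// unlinks list_->pendingNext, so C would be dequeued before B.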
void ReferenceQueue::EnqueueReference(ObjPtr<mirror::Reference> ref) {
  DCHECK(ref != nullptr);
  CHECK(ref->IsUnprocessed());
  if (IsEmpty()) {
    // One-element cyclic queue, i.e., Reference ref = ..; ref.pendingNext = ref;
    list_ = ref.Ptr();
  } else {
    // The list is owned by the GC, so everything that has been inserted must already be at least
    // gray.
    ObjPtr<mirror::Reference> head = list_->GetPendingNext<kWithoutReadBarrier>();
    DCHECK(head != nullptr);
    ref->SetPendingNext(head);
  }
  // Add the reference in the middle to preserve the cycle.
  list_->SetPendingNext(ref);
}

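// Removes and returns list_->pendingNext. Resetting pendingNext to null below is
// what makes the dequeued reference report IsUnprocessed() again, allowing it to
// be re-enqueued later.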
ObjPtr<mirror::Reference> ReferenceQueue::DequeuePendingReference() {
  DCHECK(!IsEmpty());
  ObjPtr<mirror::Reference> ref = list_->GetPendingNext<kWithoutReadBarrier>();
  DCHECK(ref != nullptr);
  // Note: the following code is thread-safe because it is only called from ProcessReferences,
  // which is single-threaded.
  if (list_ == ref) {
    list_ = nullptr;
  } else {
    ObjPtr<mirror::Reference> next = ref->GetPendingNext<kWithoutReadBarrier>();
    list_->SetPendingNext(next);
  }
  ref->SetPendingNext(nullptr);
  return ref;
}

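// Background for the read barrier handling below: under the Baker-style read
// barrier used by the concurrent copying collector, each object carries a read
// barrier state in its lock word, and ConcurrentCopying::ProcessMarkStackRef()
// leaves references it pushes onto these queues gray. Once reference processing
// has dequeued a reference, the state is reset to non-gray here so that mutators
// no longer take the read barrier slow path for it.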
// This must be called whenever DequeuePendingReference is called.
void ReferenceQueue::DisableReadBarrierForReference(ObjPtr<mirror::Reference> ref) {
  Heap* heap = Runtime::Current()->GetHeap();
  if (kUseBakerOrBrooksReadBarrier && heap->CurrentCollectorType() == kCollectorTypeCC &&
      heap->ConcurrentCopyingCollector()->IsActive()) {
    // Change the gray ptr we left in ConcurrentCopying::ProcessMarkStackRef() to non-gray.
    // We check IsActive() above because we don't want to do this when the zygote compaction
    // collector (SemiSpace) is running.
    CHECK(ref != nullptr);
    collector::ConcurrentCopying* concurrent_copying = heap->ConcurrentCopyingCollector();
    uint32_t rb_state = ref->GetReadBarrierState();
    if (rb_state == ReadBarrier::GrayState()) {
      ref->AtomicSetReadBarrierState(ReadBarrier::GrayState(), ReadBarrier::NonGrayState());
      CHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::NonGrayState());
    } else {
      // In ConcurrentCopying::ProcessMarkStackRef() we may leave a non-gray reference in the queue
      // and find it here, which is OK.
      CHECK_EQ(rb_state, ReadBarrier::NonGrayState()) << "ref=" << ref << " rb_state=" << rb_state;
      ObjPtr<mirror::Object> referent = ref->GetReferent<kWithoutReadBarrier>();
      // The referent could be null if it's cleared by a mutator (Reference.clear()).
      if (referent != nullptr) {
        CHECK(concurrent_copying->IsInToSpace(referent.Ptr()))
            << "ref=" << ref << " rb_state=" << ref->GetReadBarrierState()
            << " referent=" << referent;
      }
    }
  }
}

void ReferenceQueue::Dump(std::ostream& os) const {
  ObjPtr<mirror::Reference> cur = list_;
  os << "Reference starting at list_=" << list_ << "\n";
  if (cur == nullptr) {
    return;
  }
  do {
    ObjPtr<mirror::Reference> pending_next = cur->GetPendingNext();
    os << "Reference= " << cur << " PendingNext=" << pending_next;
    if (cur->IsFinalizerReferenceInstance()) {
      os << " Zombie=" << cur->AsFinalizerReference()->GetZombie();
    }
    os << "\n";
    cur = pending_next;
  } while (cur != list_);
}

size_t ReferenceQueue::GetLength() const {
  size_t count = 0;
  ObjPtr<mirror::Reference> cur = list_;
  if (cur != nullptr) {
    do {
      ++count;
      cur = cur->GetPendingNext();
    } while (cur != list_);
  }
  return count;
}

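// Clears the referent of every reference on this queue whose referent is white
// (not marked, hence unreachable), and moves those references onto
// cleared_references, from where they are later handed to the Java-side
// java.lang.ref.ReferenceQueues.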
void ReferenceQueue::ClearWhiteReferences(ReferenceQueue* cleared_references,
                                          collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    ObjPtr<mirror::Reference> ref = DequeuePendingReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    // do_atomic_update is false because this happens during the reference processing phase where
    // Reference.clear() would block.
    if (!collector->IsNullOrMarkedHeapReference(referent_addr, /*do_atomic_update*/false)) {
      // Referent is white, clear it.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->ClearReferent<true>();
      } else {
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
    // Delay disabling the read barrier until here so that the ClearReferent call above in
    // transaction mode will trigger the read barrier.
    DisableReadBarrierForReference(ref);
  }
}

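// For each finalizer reference with a white referent: MarkObject() keeps the
// referent alive (returning its forwarded address under a moving collector), the
// result is stashed in the zombie field so it stays reachable until its
// finalizer has run, and the referent field itself is cleared before the
// reference is passed to cleared_references.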
void ReferenceQueue::EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                                collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    ObjPtr<mirror::FinalizerReference> ref = DequeuePendingReference()->AsFinalizerReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    // do_atomic_update is false because this happens during the reference processing phase where
    // Reference.clear() would block.
    if (!collector->IsNullOrMarkedHeapReference(referent_addr, /*do_atomic_update*/false)) {
      ObjPtr<mirror::Object> forward_address = collector->MarkObject(referent_addr->AsMirrorPtr());
      // Move the updated referent to the zombie field.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->SetZombie<true>(forward_address);
        ref->ClearReferent<true>();
      } else {
        ref->SetZombie<false>(forward_address);
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
    // Delay disabling the read barrier until here so that the ClearReferent call above in
    // transaction mode will trigger the read barrier.
    DisableReadBarrierForReference(ref->AsReference());
  }
}

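// Called when the GC has decided to retain soft references: marks every non-null
// referent on this queue so it survives the collection, updating each heap
// reference in place to the forwarded address under a moving collector.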
void ReferenceQueue::ForwardSoftReferences(MarkObjectVisitor* visitor) {
  if (UNLIKELY(IsEmpty())) {
    return;
  }
  ObjPtr<mirror::Reference> const head = list_;
  ObjPtr<mirror::Reference> ref = head;
  do {
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr) {
      // do_atomic_update is false because mutators can't access the referent due to the weak ref
      // access blocking.
      visitor->MarkHeapReference(referent_addr, /*do_atomic_update*/ false);
    }
    ref = ref->GetPendingNext();
  } while (LIKELY(ref != head));
}

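// list_ is a native root into the managed heap; if a moving collector has
// relocated the head reference object, IsMarked() returns its new address and
// list_ must be updated to point there.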
void ReferenceQueue::UpdateRoots(IsMarkedVisitor* visitor) {
  if (list_ != nullptr) {
    list_ = down_cast<mirror::Reference*>(visitor->IsMarked(list_));
  }
}

}  // namespace gc
}  // namespace art