/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_queue.h"

#include "accounting/card_table-inl.h"
#include "collector/concurrent_copying.h"
#include "heap.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/reference-inl.h"

namespace art {
namespace gc {

ReferenceQueue::ReferenceQueue(Mutex* lock) : lock_(lock), list_(nullptr) {
}

void ReferenceQueue::AtomicEnqueueIfNotEnqueued(Thread* self, ObjPtr<mirror::Reference> ref) {
  DCHECK(ref != nullptr);
  MutexLock mu(self, *lock_);
  if (ref->IsUnprocessed()) {
    EnqueueReference(ref);
  }
}

void ReferenceQueue::EnqueueReference(ObjPtr<mirror::Reference> ref) {
  DCHECK(ref != nullptr);
  CHECK(ref->IsUnprocessed());
  if (IsEmpty()) {
    // One-element cyclic queue, i.e.: Reference ref = ..; ref.pendingNext = ref;
    list_ = ref.Ptr();
  } else {
    // The list is owned by the GC, so everything that has been inserted must already be at
    // least gray.
    ObjPtr<mirror::Reference> head = list_->GetPendingNext<kWithoutReadBarrier>();
    DCHECK(head != nullptr);
    ref->SetPendingNext(head);
  }
  // Add the reference in the middle to preserve the cycle.
  list_->SetPendingNext(ref);
}
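
// A sketch of the cycle this maintains (illustration only, not ART code):
// starting from an empty queue, enqueueing a, b, then c yields
//   list_ == a,  a.pendingNext == c,  c.pendingNext == b,  b.pendingNext == a
// i.e. each new reference is spliced in directly after list_.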

ObjPtr<mirror::Reference> ReferenceQueue::DequeuePendingReference() {
  DCHECK(!IsEmpty());
  ObjPtr<mirror::Reference> ref = list_->GetPendingNext<kWithoutReadBarrier>();
  DCHECK(ref != nullptr);
  // Note: the following code is thread-safe because it is only called from ProcessReferences,
  // which is single-threaded.
  if (list_ == ref) {
    list_ = nullptr;
  } else {
    ObjPtr<mirror::Reference> next = ref->GetPendingNext<kWithoutReadBarrier>();
    list_->SetPendingNext(next);
  }
  ref->SetPendingNext(nullptr);
  Heap* heap = Runtime::Current()->GetHeap();
  if (kUseBakerOrBrooksReadBarrier && heap->CurrentCollectorType() == kCollectorTypeCC &&
      heap->ConcurrentCopyingCollector()->IsActive()) {
    // Change the gray ptr we left in ConcurrentCopying::ProcessMarkStackRef() to white.
    // We check IsActive() above because we don't want to do this when the zygote compaction
    // collector (SemiSpace) is running.
    CHECK(ref != nullptr);
    collector::ConcurrentCopying* concurrent_copying = heap->ConcurrentCopyingCollector();
    uint32_t rb_state = ref->GetReadBarrierState();
    if (rb_state == ReadBarrier::GrayState()) {
      ref->AtomicSetReadBarrierState(ReadBarrier::GrayState(), ReadBarrier::WhiteState());
      CHECK_EQ(ref->GetReadBarrierState(), ReadBarrier::WhiteState());
    } else {
      // In ConcurrentCopying::ProcessMarkStackRef() we may leave a white reference in the queue
      // and find it here, which is OK.
      CHECK_EQ(rb_state, ReadBarrier::WhiteState()) << "ref=" << ref << " rb_state=" << rb_state;
      ObjPtr<mirror::Object> referent = ref->GetReferent<kWithoutReadBarrier>();
      // The referent could be null if it was cleared by a mutator (Reference.clear()).
      if (referent != nullptr) {
        CHECK(concurrent_copying->IsInToSpace(referent.Ptr()))
            << "ref=" << ref << " rb_state=" << ref->GetReadBarrierState()
            << " referent=" << referent;
      }
    }
  }
  return ref;
}
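
// Dequeue order sketch (illustration only): with the cycle above
// (list_ == a, a.pendingNext == c, c.pendingNext == b), successive calls
// return c, then b, then a, after which list_ becomes nullptr.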

void ReferenceQueue::Dump(std::ostream& os) const {
  ObjPtr<mirror::Reference> cur = list_;
  os << "Reference starting at list_=" << list_ << "\n";
  if (cur == nullptr) {
    return;
  }
  do {
    ObjPtr<mirror::Reference> pending_next = cur->GetPendingNext();
    os << "Reference= " << cur << " PendingNext=" << pending_next;
    if (cur->IsFinalizerReferenceInstance()) {
      os << " Zombie=" << cur->AsFinalizerReference()->GetZombie();
    }
    os << "\n";
    cur = pending_next;
  } while (cur != list_);
}
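
// Example of the dump format for a two-element queue (hypothetical addresses,
// for illustration only):
//   Reference starting at list_=0x12c00000
//   Reference= 0x12c00000 PendingNext=0x12c00040
//   Reference= 0x12c00040 PendingNext=0x12c00000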

size_t ReferenceQueue::GetLength() const {
  size_t count = 0;
  ObjPtr<mirror::Reference> cur = list_;
  if (cur != nullptr) {
    do {
      ++count;
      cur = cur->GetPendingNext();
    } while (cur != list_);
  }
  return count;
}

void ReferenceQueue::ClearWhiteReferences(ReferenceQueue* cleared_references,
                                          collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    ObjPtr<mirror::Reference> ref = DequeuePendingReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    // do_atomic_update is false because this happens during the reference processing phase where
    // Reference.clear() would block.
    if (!collector->IsNullOrMarkedHeapReference(referent_addr, /*do_atomic_update*/false)) {
      // Referent is white, clear it.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->ClearReferent<true>();
      } else {
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
  }
}
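
// Added note: a "white" referent is one the collector did not mark, i.e. an
// object that is no longer strongly reachable. Clearing the referent here is
// what makes a Java-side SoftReference/WeakReference.get() subsequently
// return null.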

void ReferenceQueue::EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                                collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    ObjPtr<mirror::FinalizerReference> ref = DequeuePendingReference()->AsFinalizerReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    // do_atomic_update is false because this happens during the reference processing phase where
    // Reference.clear() would block.
    if (!collector->IsNullOrMarkedHeapReference(referent_addr, /*do_atomic_update*/false)) {
      ObjPtr<mirror::Object> forward_address = collector->MarkObject(referent_addr->AsMirrorPtr());
      // Move the updated referent to the zombie field.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->SetZombie<true>(forward_address);
        ref->ClearReferent<true>();
      } else {
        ref->SetZombie<false>(forward_address);
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
  }
}
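
// Added note: MarkObject() resurrects the unmarked referent (returning its
// forwarded address under a moving collector), and parking it in the zombie
// field keeps it alive so the Java-side finalizer daemon can still run its
// finalizer after the referent field itself has been cleared.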

void ReferenceQueue::ForwardSoftReferences(MarkObjectVisitor* visitor) {
  if (UNLIKELY(IsEmpty())) {
    return;
  }
  ObjPtr<mirror::Reference> const head = list_;
  ObjPtr<mirror::Reference> ref = head;
  do {
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr) {
      visitor->MarkHeapReference(referent_addr);
    }
    ref = ref->GetPendingNext();
  } while (LIKELY(ref != head));
}
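
// Added note: marking through each non-null referent makes softly reachable
// objects count as live for this collection; whether to preserve soft
// references at all is a policy decision made by the caller, typically based
// on memory pressure.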

void ReferenceQueue::UpdateRoots(IsMarkedVisitor* visitor) {
  if (list_ != nullptr) {
    list_ = down_cast<mirror::Reference*>(visitor->IsMarked(list_));
  }
}
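
// Added note: list_ is this queue's only native root into the managed heap;
// IsMarked() is expected to return the object's new address after a moving
// collection, so this keeps the head pointer valid.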

}  // namespace gc
}  // namespace art