/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_queue.h"

#include "accounting/card_table-inl.h"
#include "collector/concurrent_copying.h"
#include "heap.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/reference-inl.h"

namespace art {
namespace gc {

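// A ReferenceQueue stores its elements as a circular singly-linked list threaded through the
// pendingNext field of each java.lang.ref.Reference. list_ points to one element of the cycle
// (or is null when the queue is empty); both enqueue and dequeue operate on
// list_->GetPendingNext(). For example, after enqueueing A and then B on an empty queue:
// list_ == A, A.pendingNext == B, B.pendingNext == A.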
ReferenceQueue::ReferenceQueue(Mutex* lock) : lock_(lock), list_(nullptr) {
}

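// Thread-safe enqueue: grabs lock_ and only enqueues the reference if it has not already been
// enqueued or processed (IsUnprocessed()).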
void ReferenceQueue::AtomicEnqueueIfNotEnqueued(Thread* self, mirror::Reference* ref) {
  DCHECK(ref != nullptr);
  MutexLock mu(self, *lock_);
  if (ref->IsUnprocessed()) {
    EnqueueReference(ref);
  }
}

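// Unsynchronized enqueue: links ref into the circular pending-reference list. This performs no
// locking itself; see AtomicEnqueueIfNotEnqueued for the locking variant.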
void ReferenceQueue::EnqueueReference(mirror::Reference* ref) {
  DCHECK(ref != nullptr);
  CHECK(ref->IsUnprocessed());
  if (IsEmpty()) {
    // 1 element cyclic queue, i.e.: Reference ref = ..; ref.pendingNext = ref;
    list_ = ref;
  } else {
    mirror::Reference* head = list_->GetPendingNext();
    DCHECK(head != nullptr);
    ref->SetPendingNext(head);
  }
  // Add the reference in the middle to preserve the cycle.
  list_->SetPendingNext(ref);
}

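// Remove and return the reference at list_->GetPendingNext(), clearing its pendingNext field.
// When the concurrent copying collector is active, this also changes the read barrier pointer
// that ProcessMarkStackRef() left gray back to white.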
mirror::Reference* ReferenceQueue::DequeuePendingReference() {
  DCHECK(!IsEmpty());
  mirror::Reference* ref = list_->GetPendingNext();
  DCHECK(ref != nullptr);
  // Note: the following code is thread-safe because it is only called from ProcessReferences,
  // which is single-threaded.
  if (list_ == ref) {
    list_ = nullptr;
  } else {
    mirror::Reference* next = ref->GetPendingNext();
    list_->SetPendingNext(next);
  }
  ref->SetPendingNext(nullptr);
  Heap* heap = Runtime::Current()->GetHeap();
  if (kUseBakerOrBrooksReadBarrier && heap->CurrentCollectorType() == kCollectorTypeCC &&
      heap->ConcurrentCopyingCollector()->IsActive()) {
    // Change the gray ptr we left in ConcurrentCopying::ProcessMarkStackRef() to white.
    // We check IsActive() above because we don't want to do this when the zygote compaction
    // collector (SemiSpace) is running.
    CHECK(ref != nullptr);
    collector::ConcurrentCopying* concurrent_copying = heap->ConcurrentCopyingCollector();
    mirror::Object* rb_ptr = ref->GetReadBarrierPointer();
    if (rb_ptr == ReadBarrier::GrayPtr()) {
      ref->AtomicSetReadBarrierPointer(ReadBarrier::GrayPtr(), ReadBarrier::WhitePtr());
      CHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr());
    } else {
      // In ConcurrentCopying::ProcessMarkStackRef() we may leave a white reference in the queue
      // and find it here, which is OK.
      CHECK_EQ(rb_ptr, ReadBarrier::WhitePtr()) << "ref=" << ref << " rb_ptr=" << rb_ptr;
      mirror::Object* referent = ref->GetReferent<kWithoutReadBarrier>();
      // The referent could be null if it's cleared by a mutator (Reference.clear()).
      if (referent != nullptr) {
        CHECK(concurrent_copying->IsInToSpace(referent))
            << "ref=" << ref << " rb_ptr=" << ref->GetReadBarrierPointer()
            << " referent=" << referent;
      }
    }
  }
  return ref;
}

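// Dump each reference on the queue along with its pendingNext pointer and, for finalizer
// references, its zombie field.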
void ReferenceQueue::Dump(std::ostream& os) const {
  mirror::Reference* cur = list_;
  os << "Reference starting at list_=" << list_ << "\n";
  if (cur == nullptr) {
    return;
  }
  do {
    mirror::Reference* pending_next = cur->GetPendingNext();
    os << "Reference= " << cur << " PendingNext=" << pending_next;
    if (cur->IsFinalizerReferenceInstance()) {
      os << " Zombie=" << cur->AsFinalizerReference()->GetZombie();
    }
    os << "\n";
    cur = pending_next;
  } while (cur != list_);
}

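// Walk the circular list once to count the enqueued references.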
size_t ReferenceQueue::GetLength() const {
  size_t count = 0;
  mirror::Reference* cur = list_;
  if (cur != nullptr) {
    do {
      ++count;
      cur = cur->GetPendingNext();
    } while (cur != list_);
  }
  return count;
}

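// Drain this queue: references whose referents are unmarked (white) have their referents cleared
// and are moved to cleared_references; references whose referents are still reachable are simply
// removed from this queue.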
void ReferenceQueue::ClearWhiteReferences(ReferenceQueue* cleared_references,
                                          collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    mirror::Reference* ref = DequeuePendingReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr &&
        !collector->IsMarkedHeapReference(referent_addr)) {
      // Referent is white, clear it.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->ClearReferent<true>();
      } else {
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
  }
}

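// Drain this queue of finalizer references: if a referent is unmarked, mark it so it stays alive
// for finalization, store the (possibly forwarded) referent in the zombie field, clear the
// referent, and move the reference to cleared_references.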
void ReferenceQueue::EnqueueFinalizerReferences(ReferenceQueue* cleared_references,
                                                collector::GarbageCollector* collector) {
  while (!IsEmpty()) {
    mirror::FinalizerReference* ref = DequeuePendingReference()->AsFinalizerReference();
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr &&
        !collector->IsMarkedHeapReference(referent_addr)) {
      mirror::Object* forward_address = collector->MarkObject(referent_addr->AsMirrorPtr());
      // Move the updated referent to the zombie field.
      if (Runtime::Current()->IsActiveTransaction()) {
        ref->SetZombie<true>(forward_address);
        ref->ClearReferent<true>();
      } else {
        ref->SetZombie<false>(forward_address);
        ref->ClearReferent<false>();
      }
      cleared_references->EnqueueReference(ref);
    }
  }
}

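// Mark the referent of every soft reference on this queue via the visitor, which keeps the
// referents alive and lets the visitor update the referent fields if the objects move. Used when
// soft references are being preserved rather than cleared.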
void ReferenceQueue::ForwardSoftReferences(MarkObjectVisitor* visitor) {
  if (UNLIKELY(IsEmpty())) {
    return;
  }
  mirror::Reference* const head = list_;
  mirror::Reference* ref = head;
  do {
    mirror::HeapReference<mirror::Object>* referent_addr = ref->GetReferentReferenceAddr();
    if (referent_addr->AsMirrorPtr() != nullptr) {
      visitor->MarkHeapReference(referent_addr);
    }
    ref = ref->GetPendingNext();
  } while (LIKELY(ref != head));
}

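// Visit list_ as a GC root so that it is updated if the Reference object it points to has been
// moved.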
void ReferenceQueue::UpdateRoots(IsMarkedVisitor* visitor) {
  if (list_ != nullptr) {
    list_ = down_cast<mirror::Reference*>(visitor->IsMarked(list_));
  }
}

}  // namespace gc
}  // namespace art