/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reference_processor.h"

#include "mirror/object-inl.h"
#include "mirror/reference-inl.h"
#include "reflection.h"
#include "ScopedLocalRef.h"
#include "scoped_thread_state_change.h"
#include "well_known_classes.h"

namespace art {
namespace gc {

ReferenceProcessor::ReferenceProcessor()
    : process_references_args_(nullptr, nullptr, nullptr), slow_path_enabled_(false),
      preserving_references_(false), lock_("reference processor lock", kReferenceProcessorLock),
      condition_("reference processor condition", lock_) {
}

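// The slow-path flag gates mutator access to referents: while it is set, every
// java.lang.ref.Reference.get() that reaches GetReferent() below takes lock_ and may block until
// reference processing finishes. EnableSlowPath() requires the mutator lock to be held
// exclusively (i.e. during a suspend-all pause), so no mutator can race with the flag being set;
// DisableSlowPath() clears the flag and wakes up any waiters.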
void ReferenceProcessor::EnableSlowPath() {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  slow_path_enabled_ = true;
}

void ReferenceProcessor::DisableSlowPath(Thread* self) {
  slow_path_enabled_ = false;
  condition_.Broadcast(self);
}

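// Mutator-facing entry point: returns a referent that is safe for the caller to use, blocking
// while a concurrent GC is still deciding the fate of white referents. A hypothetical caller
// sketch, assuming the Reference.get() intrinsic routes through the heap's reference processor:
//
//   ReferenceProcessor* rp = Runtime::Current()->GetHeap()->GetReferenceProcessor();
//   mirror::Object* obj = rp->GetReferent(Thread::Current(), ref);  // May block on condition_.
//
// Note that the loop below re-reads the referent on every wakeup, since it may have been cleared
// or updated while we were waiting.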
mirror::Object* ReferenceProcessor::GetReferent(Thread* self, mirror::Reference* reference) {
  mirror::Object* const referent = reference->GetReferent();
  if (LIKELY(!slow_path_enabled_)) {
    return referent;
  }
  // Another fast path: if the referent is already cleared, we can just return null since there is
  // no scenario in which it becomes non-null again.
  if (referent == nullptr) {
    return nullptr;
  }
  MutexLock mu(self, lock_);
  while (slow_path_enabled_) {
    mirror::Object* const referent = reference->GetReferent();
    // If the referent became cleared, return null.
    if (referent == nullptr) {
      return nullptr;
    }
    // Try to see if the referent is already marked by using the is_marked_callback. We can return
    // it to the mutator as long as the GC is not preserving references. If the GC is preserving
    // references, the mutator could take a white field and move it somewhere else in the heap,
    // causing corruption since this field would get swept.
    IsMarkedCallback* const is_marked_callback = process_references_args_.is_marked_callback_;
    if (!preserving_references_ && is_marked_callback != nullptr) {
      mirror::Object* const obj = is_marked_callback(referent, process_references_args_.arg_);
      // Null means not marked, but the referent could still become marked if it is reachable from
      // finalizer referents, so we cannot return null here and must block instead.
      if (obj != nullptr) {
        return obj;
      }
    }
    condition_.WaitHoldingLocks(self);
  }
  return reference->GetReferent();
}

mirror::Object* ReferenceProcessor::PreserveSoftReferenceCallback(mirror::Object* obj, void* arg) {
  auto* const args = reinterpret_cast<ProcessReferencesArgs*>(arg);
  // TODO: Don't preserve all soft references.
  return args->mark_callback_(obj, args->arg_);
}

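// Start/StopPreservingReferences bracket the GC phases that can still mark white referents
// (soft reference preservation and finalizer enqueueing). While preserving_references_ is set,
// GetReferent() refuses to trust is_marked_callback_ results and blocks instead; see the
// corruption scenario described in GetReferent() above.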
void ReferenceProcessor::StartPreservingReferences(Thread* self) {
  MutexLock mu(self, lock_);
  preserving_references_ = true;
}

void ReferenceProcessor::StopPreservingReferences(Thread* self) {
  MutexLock mu(self, lock_);
  preserving_references_ = false;
  // We are done preserving references; threads blocked in GetReferent may now see a marked
  // referent, so wake them up.
  condition_.Broadcast(self);
}

// Process reference class instances and schedule finalizations.
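// The phases below follow the usual Java reference semantics order: (1) optionally preserve some
// softly reachable objects, (2) clear soft and weak references with white referents, (3) mark and
// enqueue finalizable objects, (4) clear soft and weak references that only became reachable
// through finalizer referents, then (5) clear phantom references. When concurrent, mutators keep
// running, so steps 1 and 3 are bracketed by Start/StopPreservingReferences to keep GetReferent()
// from handing out referents whose mark state is still changing.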
void ReferenceProcessor::ProcessReferences(bool concurrent, TimingLogger* timings,
                                           bool clear_soft_references,
                                           IsMarkedCallback* is_marked_callback,
                                           MarkObjectCallback* mark_object_callback,
                                           ProcessMarkStackCallback* process_mark_stack_callback,
                                           void* arg) {
  Thread* self = Thread::Current();
  {
    MutexLock mu(self, lock_);
    process_references_args_.is_marked_callback_ = is_marked_callback;
    process_references_args_.mark_callback_ = mark_object_callback;
    process_references_args_.arg_ = arg;
    CHECK_EQ(slow_path_enabled_, concurrent) << "Slow path must be enabled iff concurrent";
  }
  timings->StartSplit(concurrent ? "ProcessReferences" : "(Paused)ProcessReferences");
  // Unless we are required to clear soft references with white referents, preserve some white
  // referents.
  if (!clear_soft_references) {
    TimingLogger::ScopedSplit split(concurrent ? "PreserveSomeSoftReferences" :
        "(Paused)PreserveSomeSoftReferences", timings);
    if (concurrent) {
      StartPreservingReferences(self);
    }
    // References with a marked referent are removed from the list.
    soft_reference_queue_.PreserveSomeSoftReferences(&PreserveSoftReferenceCallback,
                                                     &process_references_args_);
    process_mark_stack_callback(arg);
    if (concurrent) {
      StopPreservingReferences(self);
    }
  }
  // Clear all remaining soft and weak references with white referents.
  soft_reference_queue_.ClearWhiteReferences(cleared_references_, is_marked_callback, arg);
  weak_reference_queue_.ClearWhiteReferences(cleared_references_, is_marked_callback, arg);
  {
    TimingLogger::ScopedSplit split(concurrent ? "EnqueueFinalizerReferences" :
        "(Paused)EnqueueFinalizerReferences", timings);
    if (concurrent) {
      StartPreservingReferences(self);
    }
    // Preserve all white objects with finalize methods and schedule them for finalization.
    finalizer_reference_queue_.EnqueueFinalizerReferences(cleared_references_, is_marked_callback,
                                                          mark_object_callback, arg);
    process_mark_stack_callback(arg);
    if (concurrent) {
      StopPreservingReferences(self);
    }
  }
  // Clear all finalizer-referent-reachable soft and weak references with white referents.
  soft_reference_queue_.ClearWhiteReferences(cleared_references_, is_marked_callback, arg);
  weak_reference_queue_.ClearWhiteReferences(cleared_references_, is_marked_callback, arg);
  // Clear all phantom references with white referents.
  phantom_reference_queue_.ClearWhiteReferences(cleared_references_, is_marked_callback, arg);
  // At this point all reference queues other than the cleared references should be empty.
  DCHECK(soft_reference_queue_.IsEmpty());
  DCHECK(weak_reference_queue_.IsEmpty());
  DCHECK(finalizer_reference_queue_.IsEmpty());
  DCHECK(phantom_reference_queue_.IsEmpty());
  {
    MutexLock mu(self, lock_);
    // We always need to clear this since the next GC may be concurrent. Clearing it only for
    // concurrent GCs could result in a stale is_marked_callback_ being called before reference
    // processing starts, since there is a small window of time where slow_path_enabled_ is set
    // but the callback isn't yet.
    process_references_args_.is_marked_callback_ = nullptr;
    if (concurrent) {
      // Done processing, disable the slow path and broadcast to the waiters.
      DisableSlowPath(self);
    }
  }
  timings->EndSplit();
}

// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void ReferenceProcessor::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* ref,
                                                IsMarkedCallback is_marked_callback, void* arg) {
  // klass can be the class of the old object if the visitor already updated the class of ref.
  DCHECK(klass->IsReferenceClass());
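  // Read the referent without a read barrier: with a read-barrier collector, a barrier here could
  // mark the referent and keep otherwise-dead objects alive (presumably why kWithoutReadBarrier
  // is used; see the template parameter on Reference::GetReferent).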
  mirror::Object* referent = ref->GetReferent<kWithoutReadBarrier>();
  if (referent != nullptr) {
    mirror::Object* forward_address = is_marked_callback(referent, arg);
    // Null means that the object is not currently marked.
    if (forward_address == nullptr) {
      Thread* self = Thread::Current();
      // TODO: Remove these locks, and use atomic stacks for storing references?
      // We need to check that the references haven't already been enqueued since we can end up
      // scanning the same reference multiple times due to dirty cards.
      if (klass->IsSoftReferenceClass()) {
        soft_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
      } else if (klass->IsWeakReferenceClass()) {
        weak_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
      } else if (klass->IsFinalizerReferenceClass()) {
        finalizer_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
      } else if (klass->IsPhantomReferenceClass()) {
        phantom_reference_queue_.AtomicEnqueueIfNotEnqueued(self, ref);
      } else {
        LOG(FATAL) << "Invalid reference type " << PrettyClass(klass) << " " << std::hex
                   << klass->GetAccessFlags();
      }
    } else if (referent != forward_address) {
      // Referent is already marked and we need to update it.
      ref->SetReferent<false>(forward_address);
    }
  }
}

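// Hands the chain of cleared references over to managed code by calling
// java.lang.ref.ReferenceQueue.add() with the head of the list. A rough sketch of what the
// reflective call below amounts to on the Java side (names from libcore, not from this file):
//
//   // Java: static void add(Reference<?> list) { ... enqueue each element ... }
//   ReferenceQueue.add(clearedListHead);
//
// The mutator lock must not be held on entry, presumably because ScopedObjectAccess performs a
// thread state transition that acquires it as a shared lock.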
void ReferenceProcessor::EnqueueClearedReferences() {
  Thread* self = Thread::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  if (!cleared_references_.IsEmpty()) {
    // When a runtime isn't started there are no reference queues to care about, so ignore.
    if (LIKELY(Runtime::Current()->IsStarted())) {
      ScopedObjectAccess soa(self);
      ScopedLocalRef<jobject> arg(self->GetJniEnv(),
                                  soa.AddLocalReference<jobject>(cleared_references_.GetList()));
      jvalue args[1];
      args[0].l = arg.get();
      InvokeWithJValues(soa, nullptr, WellKnownClasses::java_lang_ref_ReferenceQueue_add, args);
    }
    cleared_references_.Clear();
  }
}

}  // namespace gc
}  // namespace art