ART: Rename SHARED_REQUIRES to REQUIRES_SHARED
This matches the actual Clang thread-safety attribute name and upstream usage.
Preparation for deferring to libbase.
Test: m
Test: m test-art-host
Change-Id: Ia8986b5dfd926ba772bf00b0a35eaf83596d8518
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index 43482eb..d866106 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -136,7 +136,7 @@
} else {
DCHECK(!space_->HasAddress(obj));
auto slow_path = [this](const mirror::Object* ref)
- SHARED_REQUIRES(Locks::mutator_lock_) {
+ REQUIRES_SHARED(Locks::mutator_lock_) {
// Marking a large object, make sure its aligned as a sanity check.
if (!IsAligned<kPageSize>(ref)) {
Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
@@ -289,7 +289,7 @@
void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
OVERRIDE REQUIRES(Locks::mutator_lock_)
- SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
+ REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
for (size_t i = 0; i < count; ++i) {
mirror::Object* obj = *roots[i];
mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
@@ -303,7 +303,7 @@
void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
const RootInfo& info ATTRIBUTE_UNUSED)
OVERRIDE REQUIRES(Locks::mutator_lock_)
- SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
+ REQUIRES_SHARED(Locks::heap_bitmap_lock_) {
for (size_t i = 0; i < count; ++i) {
mirror::Object* obj = roots[i]->AsMirrorPtr();
mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
@@ -322,7 +322,7 @@
public:
explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {}
- void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_)
+ void operator()(mirror::Object* obj) const REQUIRES_SHARED(Locks::heap_bitmap_lock_)
REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
collector_->UpdateObjectReferences(obj);
}
@@ -509,7 +509,7 @@
objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
reinterpret_cast<uintptr_t>(space_->End()),
[this](mirror::Object* obj)
- SHARED_REQUIRES(Locks::heap_bitmap_lock_)
+ REQUIRES_SHARED(Locks::heap_bitmap_lock_)
REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
MoveObject(obj, obj->SizeOf());
});
@@ -558,7 +558,7 @@
}
void operator()(mirror::Class* klass, mirror::Reference* ref) const
- SHARED_REQUIRES(Locks::mutator_lock_)
+ REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::heap_bitmap_lock_) {
collector_->DelayReferenceReferent(klass, ref);
}