Revert "Gray only immune objects mutators access."

The original change fails with:
    art F 11338 11338 art/runtime/gc/collector/concurrent_copying-inl.h:83] Check failed: !kGrayImmuneObject || updated_all_immune_objects_.LoadRelaxed() || gc_grays_immune_objects_ 
Bug: 29516465
Bug: 12687968

This reverts commit 16292fcc98f03690576d0739b2e5fb04b375933c.
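
For context, the restored approach records each immune-space object in a
dedicated continuous-space bitmap (cc_heap_bitmap_ / cc_bitmaps_) and marks
it with an atomic test-and-set, whereas the reverted change only grayed the
immune objects that mutators access (updated_all_immune_objects_,
immune_gray_stack_). A minimal standalone sketch of the test-and-set idea
follows; the names (SimpleBitmap, MarkImmune, mark_stack) are illustrative
only, not the real ART identifiers:

    #include <atomic>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Tiny stand-in for accounting::ContinuousSpaceBitmap: one bit per object.
    class SimpleBitmap {
     public:
      explicit SimpleBitmap(size_t num_bits)
          : words_((num_bits + kBitsPerWord - 1) / kBitsPerWord) {}

      // Atomically sets the bit and returns its previous value, analogous to
      // ContinuousSpaceBitmap::AtomicTestAndSet used in the restored code.
      bool AtomicTestAndSet(size_t bit) {
        const uintptr_t mask = uintptr_t{1} << (bit % kBitsPerWord);
        return (words_[bit / kBitsPerWord].fetch_or(mask) & mask) != 0;
      }

      bool Test(size_t bit) const {
        const uintptr_t mask = uintptr_t{1} << (bit % kBitsPerWord);
        return (words_[bit / kBitsPerWord].load() & mask) != 0;
      }

     private:
      static constexpr size_t kBitsPerWord = 8 * sizeof(uintptr_t);
      std::vector<std::atomic<uintptr_t>> words_;
    };

    // Only the caller that wins the test-and-set pushes the object for later
    // scanning, so each immune object is processed exactly once even if the
    // GC thread and a mutator race to mark it.
    void MarkImmune(SimpleBitmap& bitmap, size_t obj, std::vector<size_t>& mark_stack) {
      if (!bitmap.AtomicTestAndSet(obj)) {
        mark_stack.push_back(obj);  // Newly marked.
      }
    }

    int main() {
      SimpleBitmap bitmap(1024);
      std::vector<size_t> mark_stack;
      MarkImmune(bitmap, 42, mark_stack);
      MarkImmune(bitmap, 42, mark_stack);  // Second call is a no-op.
      std::cout << "marked=" << bitmap.Test(42)
                << " stack_size=" << mark_stack.size() << "\n";  // marked=1 stack_size=1
      return 0;
    }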

Change-Id: I1d2d988b7707e03cc94f019cf8bef5b9a9099060
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index ed95e1d..dd75006 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -50,16 +50,14 @@
       mark_stack_lock_("concurrent copying mark stack lock", kMarkSweepMarkStackLock),
       thread_running_gc_(nullptr),
       is_marking_(false), is_active_(false), is_asserting_to_space_invariant_(false),
-      region_space_bitmap_(nullptr),
       heap_mark_bitmap_(nullptr), live_stack_freeze_size_(0), mark_stack_mode_(kMarkStackModeOff),
       weak_ref_access_enabled_(true),
       skipped_blocks_lock_("concurrent copying bytes blocks lock", kMarkSweepMarkStackLock),
       rb_table_(heap_->GetReadBarrierTable()),
-      force_evacuate_all_(false),
-      immune_gray_stack_lock_("concurrent copying immune gray stack lock",
-                              kMarkSweepMarkStackLock) {
+      force_evacuate_all_(false) {
   static_assert(space::RegionSpace::kRegionSize == accounting::ReadBarrierTable::kRegionSize,
                 "The region space size and the read barrier table region size must match");
+  cc_heap_bitmap_.reset(new accounting::HeapBitmap(heap));
   Thread* self = Thread::Current();
   {
     ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
@@ -141,10 +139,19 @@
         space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
       CHECK(space->IsZygoteSpace() || space->IsImageSpace());
       immune_spaces_.AddSpace(space);
+      const char* bitmap_name = space->IsImageSpace() ? "cc image space bitmap" :
+          "cc zygote space bitmap";
+      // TODO: try avoiding using bitmaps for image/zygote to save space.
+      accounting::ContinuousSpaceBitmap* bitmap =
+          accounting::ContinuousSpaceBitmap::Create(bitmap_name, space->Begin(), space->Capacity());
+      cc_heap_bitmap_->AddContinuousSpaceBitmap(bitmap);
+      cc_bitmaps_.push_back(bitmap);
     } else if (space == region_space_) {
       accounting::ContinuousSpaceBitmap* bitmap =
           accounting::ContinuousSpaceBitmap::Create("cc region space bitmap",
                                                     space->Begin(), space->Capacity());
+      cc_heap_bitmap_->AddContinuousSpaceBitmap(bitmap);
+      cc_bitmaps_.push_back(bitmap);
       region_space_bitmap_ = bitmap;
     }
   }
@@ -172,15 +179,6 @@
   } else {
     force_evacuate_all_ = false;
   }
-  if (kUseBakerReadBarrier) {
-    updated_all_immune_objects_.StoreRelaxed(false);
-    // GC may gray immune objects in the thread flip.
-    gc_grays_immune_objects_ = true;
-    if (kIsDebugBuild) {
-      MutexLock mu(Thread::Current(), immune_gray_stack_lock_);
-      DCHECK(immune_gray_stack_.empty());
-    }
-  }
   BindBitmaps();
   if (kVerboseMode) {
     LOG(INFO) << "force_evacuate_all=" << force_evacuate_all_;
@@ -305,6 +303,30 @@
   live_stack_freeze_size_ = heap_->GetLiveStack()->Size();
 }
 
+// Used to visit objects in the immune spaces.
+class ConcurrentCopying::ImmuneSpaceObjVisitor {
+ public:
+  explicit ImmuneSpaceObjVisitor(ConcurrentCopying* cc) : collector_(cc) {}
+
+  void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_)
+      SHARED_REQUIRES(Locks::heap_bitmap_lock_) {
+    DCHECK(obj != nullptr);
+    DCHECK(collector_->immune_spaces_.ContainsObject(obj));
+    accounting::ContinuousSpaceBitmap* cc_bitmap =
+        collector_->cc_heap_bitmap_->GetContinuousSpaceBitmap(obj);
+    DCHECK(cc_bitmap != nullptr)
+        << "An immune space object must have a bitmap";
+    if (kIsDebugBuild) {
+      DCHECK(collector_->heap_->GetMarkBitmap()->Test(obj))
+          << "Immune space object must be already marked";
+    }
+    collector_->MarkUnevacFromSpaceRegionOrImmuneSpace(obj, cc_bitmap);
+  }
+
+ private:
+  ConcurrentCopying* const collector_;
+};
+
 class EmptyCheckpoint : public Closure {
  public:
   explicit EmptyCheckpoint(ConcurrentCopying* concurrent_copying)
@@ -325,27 +347,6 @@
   ConcurrentCopying* const concurrent_copying_;
 };
 
-// Used to visit objects in the immune spaces.
-inline void ConcurrentCopying::ScanImmuneObject(mirror::Object* obj) {
-  DCHECK(obj != nullptr);
-  DCHECK(immune_spaces_.ContainsObject(obj));
-  // Update the fields without graying it or pushing it onto the mark stack.
-  Scan(obj);
-}
-
-class ConcurrentCopying::ImmuneSpaceScanObjVisitor {
- public:
-  explicit ImmuneSpaceScanObjVisitor(ConcurrentCopying* cc)
-      : collector_(cc) {}
-
-  void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) {
-    collector_->ScanImmuneObject(obj);
-  }
-
- private:
-  ConcurrentCopying* const collector_;
-};
-
 // Concurrently mark roots that are guarded by read barriers and process the mark stack.
 void ConcurrentCopying::MarkingPhase() {
   TimingLogger::ScopedTiming split("MarkingPhase", GetTimings());
@@ -353,46 +354,25 @@
     LOG(INFO) << "GC MarkingPhase";
   }
   CHECK(weak_ref_access_enabled_);
-
-  // Scan immune spaces.
-  // Update all the fields in the immune spaces first without graying the objects so that we
-  // minimize dirty pages in the immune spaces. Note mutators can concurrently access and gray some
-  // of the objects.
-  if (kUseBakerReadBarrier) {
-    gc_grays_immune_objects_ = false;
-  }
-  for (auto& space : immune_spaces_.GetSpaces()) {
-    DCHECK(space->IsImageSpace() || space->IsZygoteSpace());
-    accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
-    ImmuneSpaceScanObjVisitor visitor(this);
-    live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
-                                  reinterpret_cast<uintptr_t>(space->Limit()),
-                                  visitor);
-  }
-  if (kUseBakerReadBarrier) {
-    // This release fence makes the field updates in the above loop visible before allowing mutator
-    // getting access to immune objects without graying it first.
-    updated_all_immune_objects_.StoreRelease(true);
-    // Now whiten immune objects concurrently accessed and grayed by mutators. We can't do this in
-    // the above loop because we would incorrectly disable the read barrier by whitening an object
-    // which may point to an unscanned, white object, breaking the to-space invariant.
-    //
-    // Make sure no mutators are in the middle of marking an immune object before whitening immune
-    // objects.
-    IssueEmptyCheckpoint();
-    MutexLock mu(Thread::Current(), immune_gray_stack_lock_);
-    if (kVerboseMode) {
-      LOG(INFO) << "immune gray stack size=" << immune_gray_stack_.size();
+  {
+    // Mark the image root. The WB-based collectors do not need to
+    // scan the image objects from roots by relying on the card table,
+    // but it's necessary for the RB to-space invariant to hold.
+    TimingLogger::ScopedTiming split1("VisitImageRoots", GetTimings());
+    for (space::ContinuousSpace* space : heap_->GetContinuousSpaces()) {
+      if (space->IsImageSpace()) {
+        gc::space::ImageSpace* image = space->AsImageSpace();
+        if (image != nullptr) {
+          mirror::ObjectArray<mirror::Object>* image_root = image->GetImageHeader().GetImageRoots();
+          mirror::Object* marked_image_root = Mark(image_root);
+          CHECK_EQ(image_root, marked_image_root) << "An image object does not move";
+          if (ReadBarrier::kEnableToSpaceInvariantChecks) {
+            AssertToSpaceInvariant(nullptr, MemberOffset(0), marked_image_root);
+          }
+        }
+      }
     }
-    for (mirror::Object* obj : immune_gray_stack_) {
-      DCHECK(obj->GetReadBarrierPointer() == ReadBarrier::GrayPtr());
-      bool success = obj->AtomicSetReadBarrierPointer(ReadBarrier::GrayPtr(),
-                                                      ReadBarrier::WhitePtr());
-      DCHECK(success);
-    }
-    immune_gray_stack_.clear();
   }
-
   {
     TimingLogger::ScopedTiming split2("VisitConcurrentRoots", GetTimings());
     Runtime::Current()->VisitConcurrentRoots(this, kVisitRootFlagAllRoots);
@@ -403,6 +383,16 @@
     Runtime::Current()->VisitNonThreadRoots(this);
   }
 
+  // Immune spaces.
+  for (auto& space : immune_spaces_.GetSpaces()) {
+    DCHECK(space->IsImageSpace() || space->IsZygoteSpace());
+    accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
+    ImmuneSpaceObjVisitor visitor(this);
+    live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
+                                  reinterpret_cast<uintptr_t>(space->Limit()),
+                                  visitor);
+  }
+
   Thread* self = Thread::Current();
   {
     TimingLogger::ScopedTiming split7("ProcessMarkStack", GetTimings());
@@ -1249,9 +1239,6 @@
     IssueEmptyCheckpoint();
     // Disable the check.
     is_mark_stack_push_disallowed_.StoreSequentiallyConsistent(0);
-    if (kUseBakerReadBarrier) {
-      updated_all_immune_objects_.StoreSequentiallyConsistent(false);
-    }
     CheckEmptyMarkStack();
   }
 
@@ -1301,9 +1288,13 @@
     SwapBitmaps();
     heap_->UnBindBitmaps();
 
-    // Delete the region bitmap.
-    DCHECK(region_space_bitmap_ != nullptr);
-    delete region_space_bitmap_;
+    // Remove bitmaps for the immune spaces.
+    while (!cc_bitmaps_.empty()) {
+      accounting::ContinuousSpaceBitmap* cc_bitmap = cc_bitmaps_.back();
+      cc_heap_bitmap_->RemoveContinuousSpaceBitmap(cc_bitmap);
+      delete cc_bitmap;
+      cc_bitmaps_.pop_back();
+    }
     region_space_bitmap_ = nullptr;
   }
 
@@ -1419,6 +1410,15 @@
     // In a non-moving space.
     if (immune_spaces_.ContainsObject(obj)) {
       LOG(INFO) << "holder is in an immune image or the zygote space.";
+      accounting::ContinuousSpaceBitmap* cc_bitmap =
+          cc_heap_bitmap_->GetContinuousSpaceBitmap(obj);
+      CHECK(cc_bitmap != nullptr)
+          << "An immune space object must have a bitmap.";
+      if (cc_bitmap->Test(obj)) {
+        LOG(INFO) << "holder is marked in the bit map.";
+      } else {
+        LOG(INFO) << "holder is NOT marked in the bit map.";
+      }
     } else {
       LOG(INFO) << "holder is in a non-immune, non-moving (or main) space.";
       accounting::ContinuousSpaceBitmap* mark_bitmap =
@@ -1449,17 +1449,17 @@
                                                                mirror::Object* ref) {
   // In a non-moving spaces. Check that the ref is marked.
   if (immune_spaces_.ContainsObject(ref)) {
+    accounting::ContinuousSpaceBitmap* cc_bitmap =
+        cc_heap_bitmap_->GetContinuousSpaceBitmap(ref);
+    CHECK(cc_bitmap != nullptr)
+        << "An immune space ref must have a bitmap. " << ref;
     if (kUseBakerReadBarrier) {
-      // Immune object may not be gray if called from the GC.
-      if (Thread::Current() == thread_running_gc_ && !gc_grays_immune_objects_) {
-        return;
-      }
-      bool updated_all_immune_objects = updated_all_immune_objects_.LoadSequentiallyConsistent();
-      CHECK(updated_all_immune_objects || ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr())
+      CHECK(cc_bitmap->Test(ref))
           << "Unmarked immune space ref. obj=" << obj << " rb_ptr="
-          << (obj != nullptr ? obj->GetReadBarrierPointer() : nullptr)
-          << " ref=" << ref << " ref rb_ptr=" << ref->GetReadBarrierPointer()
-          << " updated_all_immune_objects=" << updated_all_immune_objects;
+          << obj->GetReadBarrierPointer() << " ref=" << ref;
+    } else {
+      CHECK(cc_bitmap->Test(ref))
+          << "Unmarked immune space ref. obj=" << obj << " ref=" << ref;
     }
   } else {
     accounting::ContinuousSpaceBitmap* mark_bitmap =
@@ -1510,7 +1510,7 @@
   void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
       ALWAYS_INLINE
       SHARED_REQUIRES(Locks::mutator_lock_) {
-    collector_->MarkRoot</*kGrayImmuneObject*/false>(root);
+    collector_->MarkRoot(root);
   }
 
  private:
@@ -1520,7 +1520,6 @@
 // Scan ref fields of an object.
 inline void ConcurrentCopying::Scan(mirror::Object* to_ref) {
   DCHECK(!region_space_->IsInFromSpace(to_ref));
-  DCHECK_EQ(Thread::Current(), thread_running_gc_);
   RefFieldsVisitor visitor(this);
   // Disable the read barrier for a performance reason.
   to_ref->VisitReferences</*kVisitNativeRoots*/true, kDefaultVerifyFlags, kWithoutReadBarrier>(
@@ -1529,10 +1528,9 @@
 
 // Process a field.
 inline void ConcurrentCopying::Process(mirror::Object* obj, MemberOffset offset) {
-  DCHECK_EQ(Thread::Current(), thread_running_gc_);
   mirror::Object* ref = obj->GetFieldObject<
       mirror::Object, kVerifyNone, kWithoutReadBarrier, false>(offset);
-  mirror::Object* to_ref = Mark</*kGrayImmuneObject*/false>(ref);
+  mirror::Object* to_ref = Mark(ref);
   if (to_ref == ref) {
     return;
   }
@@ -1571,11 +1569,10 @@
   }
 }
 
-template<bool kGrayImmuneObject>
 inline void ConcurrentCopying::MarkRoot(mirror::CompressedReference<mirror::Object>* root) {
   DCHECK(!root->IsNull());
   mirror::Object* const ref = root->AsMirrorPtr();
-  mirror::Object* to_ref = Mark<kGrayImmuneObject>(ref);
+  mirror::Object* to_ref = Mark(ref);
   if (to_ref != ref) {
     auto* addr = reinterpret_cast<Atomic<mirror::CompressedReference<mirror::Object>>*>(root);
     auto expected_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(ref);
@@ -1596,8 +1593,7 @@
   for (size_t i = 0; i < count; ++i) {
     mirror::CompressedReference<mirror::Object>* const root = roots[i];
     if (!root->IsNull()) {
-      // kGrayImmuneObject is true because this is used for the thread flip.
-      MarkRoot</*kGrayImmuneObject*/true>(root);
+      MarkRoot(root);
     }
   }
 }
@@ -1840,8 +1836,21 @@
   } else {
     // from_ref is in a non-moving space.
     if (immune_spaces_.ContainsObject(from_ref)) {
-      // An immune object is alive.
-      to_ref = from_ref;
+      accounting::ContinuousSpaceBitmap* cc_bitmap =
+          cc_heap_bitmap_->GetContinuousSpaceBitmap(from_ref);
+      DCHECK(cc_bitmap != nullptr)
+          << "An immune space object must have a bitmap";
+      if (kIsDebugBuild) {
+        DCHECK(heap_mark_bitmap_->GetContinuousSpaceBitmap(from_ref)->Test(from_ref))
+            << "Immune space object must be already marked";
+      }
+      if (cc_bitmap->Test(from_ref)) {
+        // Already marked.
+        to_ref = from_ref;
+      } else {
+        // Newly marked.
+        to_ref = nullptr;
+      }
     } else {
       // Non-immune non-moving space. Use the mark bitmap.
       accounting::ContinuousSpaceBitmap* mark_bitmap =
@@ -1880,74 +1889,85 @@
 mirror::Object* ConcurrentCopying::MarkNonMoving(mirror::Object* ref) {
   // ref is in a non-moving space (from_ref == to_ref).
   DCHECK(!region_space_->HasAddress(ref)) << ref;
-  DCHECK(!immune_spaces_.ContainsObject(ref));
-  // Use the mark bitmap.
-  accounting::ContinuousSpaceBitmap* mark_bitmap =
-      heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
-  accounting::LargeObjectBitmap* los_bitmap =
-      heap_mark_bitmap_->GetLargeObjectBitmap(ref);
-  CHECK(los_bitmap != nullptr) << "LOS bitmap covers the entire address range";
-  bool is_los = mark_bitmap == nullptr;
-  if (!is_los && mark_bitmap->Test(ref)) {
-    // Already marked.
-    if (kUseBakerReadBarrier) {
-      DCHECK(ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr() ||
-             ref->GetReadBarrierPointer() == ReadBarrier::WhitePtr());
+  if (immune_spaces_.ContainsObject(ref)) {
+    accounting::ContinuousSpaceBitmap* cc_bitmap =
+        cc_heap_bitmap_->GetContinuousSpaceBitmap(ref);
+    DCHECK(cc_bitmap != nullptr)
+        << "An immune space object must have a bitmap";
+    if (kIsDebugBuild) {
+      DCHECK(heap_mark_bitmap_->GetContinuousSpaceBitmap(ref)->Test(ref))
+          << "Immune space object must be already marked";
     }
-  } else if (is_los && los_bitmap->Test(ref)) {
-    // Already marked in LOS.
-    if (kUseBakerReadBarrier) {
-      DCHECK(ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr() ||
-             ref->GetReadBarrierPointer() == ReadBarrier::WhitePtr());
-    }
+    MarkUnevacFromSpaceRegionOrImmuneSpace(ref, cc_bitmap);
   } else {
-    // Not marked.
-    if (IsOnAllocStack(ref)) {
-      // If it's on the allocation stack, it's considered marked. Keep it white.
-      // Objects on the allocation stack need not be marked.
-      if (!is_los) {
-        DCHECK(!mark_bitmap->Test(ref));
-      } else {
-        DCHECK(!los_bitmap->Test(ref));
-      }
+    // Use the mark bitmap.
+    accounting::ContinuousSpaceBitmap* mark_bitmap =
+        heap_mark_bitmap_->GetContinuousSpaceBitmap(ref);
+    accounting::LargeObjectBitmap* los_bitmap =
+        heap_mark_bitmap_->GetLargeObjectBitmap(ref);
+    CHECK(los_bitmap != nullptr) << "LOS bitmap covers the entire address range";
+    bool is_los = mark_bitmap == nullptr;
+    if (!is_los && mark_bitmap->Test(ref)) {
+      // Already marked.
       if (kUseBakerReadBarrier) {
-        DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr());
+        DCHECK(ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr() ||
+               ref->GetReadBarrierPointer() == ReadBarrier::WhitePtr());
+      }
+    } else if (is_los && los_bitmap->Test(ref)) {
+      // Already marked in LOS.
+      if (kUseBakerReadBarrier) {
+        DCHECK(ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr() ||
+               ref->GetReadBarrierPointer() == ReadBarrier::WhitePtr());
       }
     } else {
-      // For the baker-style RB, we need to handle 'false-gray' cases. See the
-      // kRegionTypeUnevacFromSpace-case comment in Mark().
-      if (kUseBakerReadBarrier) {
-        // Test the bitmap first to reduce the chance of false gray cases.
-        if ((!is_los && mark_bitmap->Test(ref)) ||
-            (is_los && los_bitmap->Test(ref))) {
-          return ref;
+      // Not marked.
+      if (IsOnAllocStack(ref)) {
+        // If it's on the allocation stack, it's considered marked. Keep it white.
+        // Objects on the allocation stack need not be marked.
+        if (!is_los) {
+          DCHECK(!mark_bitmap->Test(ref));
+        } else {
+          DCHECK(!los_bitmap->Test(ref));
         }
-      }
-      // Not marked or on the allocation stack. Try to mark it.
-      // This may or may not succeed, which is ok.
-      bool cas_success = false;
-      if (kUseBakerReadBarrier) {
-        cas_success = ref->AtomicSetReadBarrierPointer(ReadBarrier::WhitePtr(),
-                                                       ReadBarrier::GrayPtr());
-      }
-      if (!is_los && mark_bitmap->AtomicTestAndSet(ref)) {
-        // Already marked.
-        if (kUseBakerReadBarrier && cas_success &&
-            ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr()) {
-          PushOntoFalseGrayStack(ref);
-        }
-      } else if (is_los && los_bitmap->AtomicTestAndSet(ref)) {
-        // Already marked in LOS.
-        if (kUseBakerReadBarrier && cas_success &&
-            ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr()) {
-          PushOntoFalseGrayStack(ref);
+        if (kUseBakerReadBarrier) {
+          DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::WhitePtr());
         }
       } else {
-        // Newly marked.
+        // For the baker-style RB, we need to handle 'false-gray' cases. See the
+        // kRegionTypeUnevacFromSpace-case comment in Mark().
         if (kUseBakerReadBarrier) {
-          DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::GrayPtr());
+          // Test the bitmap first to reduce the chance of false gray cases.
+          if ((!is_los && mark_bitmap->Test(ref)) ||
+              (is_los && los_bitmap->Test(ref))) {
+            return ref;
+          }
         }
-        PushOntoMarkStack(ref);
+        // Not marked or on the allocation stack. Try to mark it.
+        // This may or may not succeed, which is ok.
+        bool cas_success = false;
+        if (kUseBakerReadBarrier) {
+          cas_success = ref->AtomicSetReadBarrierPointer(ReadBarrier::WhitePtr(),
+                                                         ReadBarrier::GrayPtr());
+        }
+        if (!is_los && mark_bitmap->AtomicTestAndSet(ref)) {
+          // Already marked.
+          if (kUseBakerReadBarrier && cas_success &&
+              ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr()) {
+            PushOntoFalseGrayStack(ref);
+          }
+        } else if (is_los && los_bitmap->AtomicTestAndSet(ref)) {
+          // Already marked in LOS.
+          if (kUseBakerReadBarrier && cas_success &&
+              ref->GetReadBarrierPointer() == ReadBarrier::GrayPtr()) {
+            PushOntoFalseGrayStack(ref);
+          }
+        } else {
+          // Newly marked.
+          if (kUseBakerReadBarrier) {
+            DCHECK_EQ(ref->GetReadBarrierPointer(), ReadBarrier::GrayPtr());
+          }
+          PushOntoMarkStack(ref);
+        }
       }
     }
   }