More of the concurrent copying collector.

Add to-space/read-barrier invariant check flags, white/gray/black read
barrier pointer constants, and startup-aware Barrier/BarrierForRoot
variants to ReadBarrier.

Bug: 12687968
Change-Id: I62f70274d47df6d6cab714df95c518b750ce3105
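
A minimal, self-contained sketch (not part of the patch) of how the low two
bits of a read barrier pointer can encode the white/gray/black color this
change introduces; kWhitePtr/kGrayPtr/kBlackPtr/kRbPtrMask mirror the
white_ptr_/gray_ptr_/black_ptr_/rb_ptr_mask_ constants below, and
ExtractColor() is a hypothetical helper used only for illustration:

// Illustrative only: models the low-two-bit color encoding of a read barrier pointer.
#include <cassert>
#include <cstdint>

namespace {

constexpr uintptr_t kWhitePtr = 0x0;   // Not marked.
constexpr uintptr_t kGrayPtr = 0x1;    // Marked, but not marked through; on the mark stack.
constexpr uintptr_t kBlackPtr = 0x2;   // Marked through; used for non-moving objects.
constexpr uintptr_t kRbPtrMask = 0x3;  // The low 2 bits select white/gray/black.

// Hypothetical helper: pull the color bits out of a read barrier pointer word.
constexpr uintptr_t ExtractColor(uintptr_t rb_ptr) {
  return rb_ptr & kRbPtrMask;
}

}  // namespace

int main() {
  // A read barrier pointer whose low bits say "gray" (the high bits would
  // carry other state in the real runtime).
  uintptr_t rb_ptr = 0xABCD0000 | kGrayPtr;
  assert(ExtractColor(rb_ptr) == kGrayPtr);
  assert(ExtractColor(rb_ptr) != kWhitePtr);
  assert(ExtractColor(rb_ptr) != kBlackPtr);
  return 0;
}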
diff --git a/runtime/read_barrier.h b/runtime/read_barrier.h
index ed5db4e..474b46f 100644
--- a/runtime/read_barrier.h
+++ b/runtime/read_barrier.h
@@ -19,6 +19,7 @@
 
 #include "base/mutex.h"
 #include "base/macros.h"
+#include "jni.h"
 #include "offsets.h"
 #include "read_barrier_c.h"
 
@@ -26,25 +27,70 @@
 // which needs to be a C header file for asm_support.h.
 
 namespace art {
+
 namespace mirror {
+  class ArtField;
+  class ArtMethod;
   class Object;
   template<typename MirrorType> class HeapReference;
 }  // namespace mirror
 
 class ReadBarrier {
  public:
+  // TODO: disable these flags for production use.
+  // Enable the to-space invariant checks.
+  static constexpr bool kEnableToSpaceInvariantChecks = true;
+  // Enable the read barrier checks.
+  static constexpr bool kEnableReadBarrierInvariantChecks = true;
+
   // It's up to the implementation whether the given field gets
   // updated whereas the return value must be an updated reference.
-  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+            bool kMaybeDuringStartup = false>
   ALWAYS_INLINE static MirrorType* Barrier(
       mirror::Object* obj, MemberOffset offset, mirror::HeapReference<MirrorType>* ref_addr)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // It's up to the implementation whether the given root gets updated
   // whereas the return value must be an updated reference.
-  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
+  template <typename MirrorType, ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
+            bool kMaybeDuringStartup = false>
   ALWAYS_INLINE static MirrorType* BarrierForRoot(MirrorType** root)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  static bool IsDuringStartup();
+
+  // Without the holder object.
+  static void AssertToSpaceInvariant(mirror::Object* ref)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+    AssertToSpaceInvariant(nullptr, MemberOffset(0), ref);
+  }
+  // With the holder object.
+  static void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset,
+                                     mirror::Object* ref)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  static mirror::Object* Mark(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  static mirror::Object* WhitePtr() {
+    return reinterpret_cast<mirror::Object*>(white_ptr_);
+  }
+  static mirror::Object* GrayPtr() {
+    return reinterpret_cast<mirror::Object*>(gray_ptr_);
+  }
+  static mirror::Object* BlackPtr() {
+    return reinterpret_cast<mirror::Object*>(black_ptr_);
+  }
+
+  ALWAYS_INLINE static bool HasGrayReadBarrierPointer(mirror::Object* obj,
+                                                      uintptr_t* out_rb_ptr_high_bits)
+      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  // Note: These cannot be constexpr pointers since reinterpret_cast is disallowed in constexpr.
+  static constexpr uintptr_t white_ptr_ = 0x0;    // Not marked.
+  static constexpr uintptr_t gray_ptr_ = 0x1;     // Marked, but not marked through. On mark stack.
+  static constexpr uintptr_t black_ptr_ = 0x2;    // Marked through. Used for non-moving objects.
+  static constexpr uintptr_t rb_ptr_mask_ = 0x3;  // The low 2 bits for white|gray|black.
 };
 
 }  // namespace art
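
A simplified, self-contained sketch (not ART code) of the contract documented
for Barrier()/BarrierForRoot(): the caller always receives an up-to-date
reference, while it is up to the implementation whether the field itself is
updated in place. Obj, forwarded_, and the free-standing Barrier() below are
hypothetical stand-ins for illustration only:

// Illustrative only: a reader goes through the barrier and always sees the to-space copy.
#include <cstdio>

struct Obj {
  Obj* forwarded_ = nullptr;  // Non-null if the object has moved (points to the to-space copy).
  int value = 0;
};

// Returns the up-to-date reference; here it also heals the field, but the
// contract allows an implementation to skip that update.
Obj* Barrier(Obj** field_addr) {
  Obj* ref = *field_addr;
  if (ref != nullptr && ref->forwarded_ != nullptr) {
    ref = ref->forwarded_;
    *field_addr = ref;  // Optional in-place update of the field.
  }
  return ref;
}

int main() {
  Obj to_space;
  to_space.value = 42;
  Obj from_space;                   // Old copy that has been moved.
  from_space.forwarded_ = &to_space;

  Obj* field = &from_space;         // A field still pointing at the old copy.
  Obj* ref = Barrier(&field);       // Reader goes through the barrier.
  std::printf("%d\n", ref->value);  // Prints 42: the to-space copy is observed.
  return 0;
}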