Run an empty checkpoint before marking ends in the CC collector.

This avoids a race between the end of marking and a mutator thread
that is still in the middle of a read barrier: the empty checkpoint
makes the GC wait until every mutator thread has passed a checkpoint,
and so has finished any read barrier it entered while marking was
still enabled, before marking is disabled.
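
As an illustration only (a minimal sketch, not ART code; all names in
it are invented for the example), the ordering that the empty
checkpoint buys can be modeled with plain std::atomic flags standing
in for the runtime's thread-list checkpoint machinery:

  #include <atomic>
  #include <cstdio>
  #include <thread>
  #include <vector>

  constexpr int kNumMutators = 4;

  std::atomic<bool> is_marking{true};
  std::atomic<bool> checkpoint_requested[kNumMutators];  // zero-init: false
  std::atomic<int> pending_acks{0};
  std::atomic<bool> shutdown_flag{false};

  // Mutator loop: a read-barrier-like region guarded by is_marking,
  // followed by a "suspend point" where a pending checkpoint request,
  // if any, is acknowledged.
  void Mutator(int id) {
    while (!shutdown_flag.load(std::memory_order_acquire)) {
      if (is_marking.load(std::memory_order_acquire)) {
        // ...read barrier slow-path work would run here...
      }
      // Suspend point: the thread is never inside the barrier region here.
      if (checkpoint_requested[id].exchange(false, std::memory_order_acq_rel)) {
        pending_acks.fetch_sub(1, std::memory_order_acq_rel);
      }
    }
  }

  // GC side: issue an empty (no-op) checkpoint, wait for every mutator
  // to acknowledge it, and only then disable marking. The wait is what
  // closes the race window with an in-flight read barrier.
  void IssueEmptyCheckpointAndDisableMarking() {
    pending_acks.store(kNumMutators, std::memory_order_release);
    for (int i = 0; i < kNumMutators; ++i) {
      checkpoint_requested[i].store(true, std::memory_order_release);
    }
    while (pending_acks.load(std::memory_order_acquire) != 0) {
      std::this_thread::yield();
    }
    is_marking.store(false, std::memory_order_release);
  }

  int main() {
    std::vector<std::thread> mutators;
    for (int i = 0; i < kNumMutators; ++i) {
      mutators.emplace_back(Mutator, i);
    }
    IssueEmptyCheckpointAndDisableMarking();
    std::printf("marking disabled after all mutators passed the checkpoint\n");
    shutdown_flag.store(true, std::memory_order_release);
    for (std::thread& t : mutators) {
      t.join();
    }
    return 0;
  }

The real IssueEmptyCheckpoint() uses the runtime's thread-list
checkpoint support rather than hand-rolled atomics; the sketch only
models the ordering guarantee that matters for this change.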

Bug: 12687968
Bug: 21564728
Change-Id: I4962d895b4df89d2bcde97cbabdb98a14a19dd6b
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 6984c16..c7d2e9f 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -412,6 +412,11 @@
     // the mark stack here once again.
     ProcessMarkStack();
     CheckEmptyMarkQueue();
+    if (kVerboseMode) {
+      LOG(INFO) << "AllowNewSystemWeaks";
+    }
+    Runtime::Current()->AllowNewSystemWeaks();
+    IssueEmptyCheckpoint();
     // Disable marking.
     if (kUseTableLookupReadBarrier) {
       heap_->rb_table_->ClearAll();
@@ -419,10 +424,6 @@
     }
     is_mark_queue_push_disallowed_.StoreSequentiallyConsistent(1);
     is_marking_ = false;
-    if (kVerboseMode) {
-      LOG(INFO) << "AllowNewSystemWeaks";
-    }
-    Runtime::Current()->AllowNewSystemWeaks();
     CheckEmptyMarkQueue();
   }