Refactor space bitmap to be a value object

Remove the need for pointer indirections in many places and reduce
the amount of code.

Bug: 116052292
Test: test-art-host
Change-Id: I4b1040d29f9ba99ea23c51f70a06af2ffbe496d4
diff --git a/runtime/gc/accounting/heap_bitmap.cc b/runtime/gc/accounting/heap_bitmap.cc
index 1d729ff..4a3902e 100644
--- a/runtime/gc/accounting/heap_bitmap.cc
+++ b/runtime/gc/accounting/heap_bitmap.cc
@@ -23,23 +23,6 @@
 namespace gc {
 namespace accounting {
 
-void HeapBitmap::ReplaceBitmap(ContinuousSpaceBitmap* old_bitmap,
-                               ContinuousSpaceBitmap* new_bitmap) {
-  auto it = std::find(continuous_space_bitmaps_.begin(), continuous_space_bitmaps_.end(),
-                      old_bitmap);
-  CHECK(it != continuous_space_bitmaps_.end()) << " continuous space bitmap " << old_bitmap
-      << " not found";
-  *it = new_bitmap;
-}
-
-void HeapBitmap::ReplaceLargeObjectBitmap(LargeObjectBitmap* old_bitmap,
-                                          LargeObjectBitmap* new_bitmap) {
-  auto it = std::find(large_object_bitmaps_.begin(), large_object_bitmaps_.end(), old_bitmap);
-  CHECK(it != large_object_bitmaps_.end()) << " large object bitmap " << old_bitmap
-      << " not found";
-  *it = new_bitmap;
-}
-
 void HeapBitmap::AddContinuousSpaceBitmap(accounting::ContinuousSpaceBitmap* bitmap) {
   DCHECK(bitmap != nullptr);
   // Check that there is no bitmap overlap.
diff --git a/runtime/gc/accounting/heap_bitmap.h b/runtime/gc/accounting/heap_bitmap.h
index e477556..a5f4499 100644
--- a/runtime/gc/accounting/heap_bitmap.h
+++ b/runtime/gc/accounting/heap_bitmap.h
@@ -55,14 +55,6 @@
       REQUIRES(Locks::heap_bitmap_lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  // Find and replace a bitmap pointer, this is used by for the bitmap swapping in the GC.
-  void ReplaceBitmap(ContinuousSpaceBitmap* old_bitmap, ContinuousSpaceBitmap* new_bitmap)
-      REQUIRES(Locks::heap_bitmap_lock_);
-
-  // Find and replace a object set pointer, this is used by for the bitmap swapping in the GC.
-  void ReplaceLargeObjectBitmap(LargeObjectBitmap* old_bitmap, LargeObjectBitmap* new_bitmap)
-      REQUIRES(Locks::heap_bitmap_lock_);
-
   explicit HeapBitmap(Heap* heap) : heap_(heap) {}
 
  private:
diff --git a/runtime/gc/accounting/space_bitmap.cc b/runtime/gc/accounting/space_bitmap.cc
index 4029057..3c5688d 100644
--- a/runtime/gc/accounting/space_bitmap.cc
+++ b/runtime/gc/accounting/space_bitmap.cc
@@ -48,13 +48,12 @@
 }
 
 template<size_t kAlignment>
-SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
+SpaceBitmap<kAlignment> SpaceBitmap<kAlignment>::CreateFromMemMap(
     const std::string& name, MemMap&& mem_map, uint8_t* heap_begin, size_t heap_capacity) {
   CHECK(mem_map.IsValid());
   uintptr_t* bitmap_begin = reinterpret_cast<uintptr_t*>(mem_map.Begin());
   const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
-  return new SpaceBitmap(
-      name, std::move(mem_map), bitmap_begin, bitmap_size, heap_begin, heap_capacity);
+  return { name, std::move(mem_map), bitmap_begin, bitmap_size, heap_begin, heap_capacity };
 }
 
 template<size_t kAlignment>
@@ -78,7 +77,7 @@
 SpaceBitmap<kAlignment>::~SpaceBitmap() {}
 
 template<size_t kAlignment>
-SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
+SpaceBitmap<kAlignment> SpaceBitmap<kAlignment>::Create(
     const std::string& name, uint8_t* heap_begin, size_t heap_capacity) {
   // Round up since `heap_capacity` is not necessarily a multiple of `kAlignment * kBitsPerIntPtrT`
   // (we represent one word as an `intptr_t`).
@@ -91,7 +90,7 @@
                                         &error_msg);
   if (UNLIKELY(!mem_map.IsValid())) {
     LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
-    return nullptr;
+    return SpaceBitmap<kAlignment>();
   }
   return CreateFromMemMap(name, std::move(mem_map), heap_begin, heap_capacity);
 }
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h
index 6ca254a..fe98741 100644
--- a/runtime/gc/accounting/space_bitmap.h
+++ b/runtime/gc/accounting/space_bitmap.h
@@ -45,15 +45,15 @@
 
   // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
   // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
-  static SpaceBitmap* Create(const std::string& name, uint8_t* heap_begin, size_t heap_capacity);
+  static SpaceBitmap Create(const std::string& name, uint8_t* heap_begin, size_t heap_capacity);
 
   // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
   // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
   // Objects are kAlignement-aligned.
-  static SpaceBitmap* CreateFromMemMap(const std::string& name,
-                                       MemMap&& mem_map,
-                                       uint8_t* heap_begin,
-                                       size_t heap_capacity);
+  static SpaceBitmap CreateFromMemMap(const std::string& name,
+                                      MemMap&& mem_map,
+                                      uint8_t* heap_begin,
+                                      size_t heap_capacity);
 
   ~SpaceBitmap();
 
@@ -124,19 +124,6 @@
     return index < bitmap_size_ / sizeof(intptr_t);
   }
 
-  class ClearVisitor {
-   public:
-    explicit ClearVisitor(SpaceBitmap* const bitmap)
-        : bitmap_(bitmap) {
-    }
-
-    void operator()(mirror::Object* obj) const {
-      bitmap_->Clear(obj);
-    }
-   private:
-    SpaceBitmap* const bitmap_;
-  };
-
   template <typename Visitor>
   void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
     for (; visit_begin < visit_end; visit_begin += kAlignment) {
@@ -219,6 +206,26 @@
   static size_t ComputeBitmapSize(uint64_t capacity);
   static size_t ComputeHeapSize(uint64_t bitmap_bytes);
 
+  // A default-constructed SpaceBitmap is invalid (it owns no underlying storage);
+  // use IsValid() to distinguish it from a successfully created bitmap.
+
+  SpaceBitmap() = default;
+  SpaceBitmap(SpaceBitmap&&) = default;
+  SpaceBitmap& operator=(SpaceBitmap&&) = default;
+
+  bool IsValid() const {
+    return bitmap_begin_ != nullptr;
+  }
+
+  // Copy a view of the other bitmap without taking ownership of the underlying data.
+  void CopyView(SpaceBitmap& other) {
+    bitmap_begin_ = other.bitmap_begin_;
+    bitmap_size_ = other.bitmap_size_;
+    heap_begin_ = other.heap_begin_;
+    heap_limit_ = other.heap_limit_;
+    name_ = other.name_;
+  }
+
  private:
   // TODO: heap_end_ is initialized so that the heap bitmap is empty, this doesn't require the -1,
   // however, we document that this is expected on heap_end_
@@ -238,17 +245,17 @@
   MemMap mem_map_;
 
   // This bitmap itself, word sized for efficiency in scanning.
-  Atomic<uintptr_t>* const bitmap_begin_;
+  Atomic<uintptr_t>* bitmap_begin_ = nullptr;
 
   // Size of this bitmap.
-  size_t bitmap_size_;
+  size_t bitmap_size_ = 0u;
 
   // The start address of the memory covered by the bitmap, which corresponds to the word
   // containing the first bit in the bitmap.
-  const uintptr_t heap_begin_;
+  uintptr_t heap_begin_ = 0u;
 
   // The end address of the memory covered by the bitmap. This may not be on a word boundary.
-  uintptr_t heap_limit_;
+  uintptr_t heap_limit_ = 0u;
 
   // Name of this bitmap.
   std::string name_;
diff --git a/runtime/gc/accounting/space_bitmap_test.cc b/runtime/gc/accounting/space_bitmap_test.cc
index 9f355e3..3a69865 100644
--- a/runtime/gc/accounting/space_bitmap_test.cc
+++ b/runtime/gc/accounting/space_bitmap_test.cc
@@ -33,9 +33,9 @@
 TEST_F(SpaceBitmapTest, Init) {
   uint8_t* heap_begin = reinterpret_cast<uint8_t*>(0x10000000);
   size_t heap_capacity = 16 * MB;
-  std::unique_ptr<ContinuousSpaceBitmap> space_bitmap(
+  ContinuousSpaceBitmap space_bitmap(
       ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
-  EXPECT_TRUE(space_bitmap.get() != nullptr);
+  EXPECT_TRUE(space_bitmap.IsValid());
 }
 
 class BitmapVerify {
@@ -61,16 +61,16 @@
   uint8_t* heap_begin = reinterpret_cast<uint8_t*>(0x10000000);
   size_t heap_capacity = 16 * MB;
 
-  std::unique_ptr<ContinuousSpaceBitmap> space_bitmap(
+  ContinuousSpaceBitmap space_bitmap(
       ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
-  EXPECT_TRUE(space_bitmap != nullptr);
+  EXPECT_TRUE(space_bitmap.IsValid());
 
   // Set all the odd bits in the first BitsPerIntPtrT * 3 to one.
   for (size_t j = 0; j < kBitsPerIntPtrT * 3; ++j) {
     const mirror::Object* obj =
         reinterpret_cast<mirror::Object*>(heap_begin + j * kObjectAlignment);
     if (reinterpret_cast<uintptr_t>(obj) & 0xF) {
-      space_bitmap->Set(obj);
+      space_bitmap.Set(obj);
     }
   }
   // Try every possible starting bit in the first word. Then for each starting bit, try each
@@ -83,7 +83,7 @@
     for (size_t j = 0; j < static_cast<size_t>(kBitsPerIntPtrT * 2); ++j) {
       mirror::Object* end =
           reinterpret_cast<mirror::Object*>(heap_begin + (i + j) * kObjectAlignment);
-      BitmapVerify(space_bitmap.get(), start, end);
+      BitmapVerify(&space_bitmap, start, end);
     }
   }
 }
@@ -92,14 +92,14 @@
   uint8_t* heap_begin = reinterpret_cast<uint8_t*>(0x10000000);
   size_t heap_capacity = 16 * MB;
 
-  std::unique_ptr<ContinuousSpaceBitmap> bitmap(
+  ContinuousSpaceBitmap bitmap(
       ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
-  EXPECT_TRUE(bitmap != nullptr);
+  EXPECT_TRUE(bitmap.IsValid());
 
   // Set all of the bits in the bitmap.
   for (size_t j = 0; j < heap_capacity; j += kObjectAlignment) {
     const mirror::Object* obj = reinterpret_cast<mirror::Object*>(heap_begin + j);
-    bitmap->Set(obj);
+    bitmap.Set(obj);
   }
 
   std::vector<std::pair<uintptr_t, uintptr_t>> ranges = {
@@ -113,18 +113,18 @@
   for (const std::pair<uintptr_t, uintptr_t>& range : ranges) {
     const mirror::Object* obj_begin = reinterpret_cast<mirror::Object*>(heap_begin + range.first);
     const mirror::Object* obj_end = reinterpret_cast<mirror::Object*>(heap_begin + range.second);
-    bitmap->ClearRange(obj_begin, obj_end);
+    bitmap.ClearRange(obj_begin, obj_end);
     // Boundaries should still be marked.
     for (uintptr_t i = 0; i < range.first; i += kObjectAlignment) {
-      EXPECT_TRUE(bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
+      EXPECT_TRUE(bitmap.Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
     }
     for (uintptr_t i = range.second; i < range.second + kPageSize; i += kObjectAlignment) {
-      EXPECT_TRUE(bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
+      EXPECT_TRUE(bitmap.Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
     }
     // Everything inside should be cleared.
     for (uintptr_t i = range.first; i < range.second; i += kObjectAlignment) {
-      EXPECT_FALSE(bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
-      bitmap->Set(reinterpret_cast<mirror::Object*>(heap_begin + i));
+      EXPECT_FALSE(bitmap.Test(reinterpret_cast<mirror::Object*>(heap_begin + i)));
+      bitmap.Set(reinterpret_cast<mirror::Object*>(heap_begin + i));
     }
   }
 }
@@ -162,7 +162,7 @@
   RandGen r(0x1234);
 
   for (int i = 0; i < 5 ; ++i) {
-    std::unique_ptr<ContinuousSpaceBitmap> space_bitmap(
+    ContinuousSpaceBitmap space_bitmap(
         ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
 
     for (int j = 0; j < 10000; ++j) {
@@ -170,9 +170,9 @@
       bool set = r.next() % 2 == 1;
 
       if (set) {
-        space_bitmap->Set(reinterpret_cast<mirror::Object*>(heap_begin + offset));
+        space_bitmap.Set(reinterpret_cast<mirror::Object*>(heap_begin + offset));
       } else {
-        space_bitmap->Clear(reinterpret_cast<mirror::Object*>(heap_begin + offset));
+        space_bitmap.Clear(reinterpret_cast<mirror::Object*>(heap_begin + offset));
       }
     }
 
@@ -183,7 +183,7 @@
 
       size_t manual = 0;
       for (uintptr_t k = offset; k < end; k += kAlignment) {
-        if (space_bitmap->Test(reinterpret_cast<mirror::Object*>(heap_begin + k))) {
+        if (space_bitmap.Test(reinterpret_cast<mirror::Object*>(heap_begin + k))) {
           manual++;
         }
       }
@@ -191,7 +191,7 @@
       uintptr_t range_begin = reinterpret_cast<uintptr_t>(heap_begin) + offset;
       uintptr_t range_end = reinterpret_cast<uintptr_t>(heap_begin) + end;
 
-      fn(space_bitmap.get(), range_begin, range_end, manual);
+      fn(&space_bitmap, range_begin, range_end, manual);
     }
   }
 }
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index f23b3fd..e3f5c33 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -101,8 +101,6 @@
       weak_ref_access_enabled_(true),
       copied_live_bytes_ratio_sum_(0.f),
       gc_count_(0),
-      region_space_inter_region_bitmap_(nullptr),
-      non_moving_space_inter_region_bitmap_(nullptr),
       reclaimed_bytes_ratio_sum_(0.f),
       skipped_blocks_lock_("concurrent copying bytes blocks lock", kMarkSweepMarkStackLock),
       measure_read_barrier_slow_path_(measure_read_barrier_slow_path),
@@ -294,24 +292,24 @@
 
 void ConcurrentCopying::CreateInterRegionRefBitmaps() {
   DCHECK(use_generational_cc_);
-  DCHECK(region_space_inter_region_bitmap_ == nullptr);
-  DCHECK(non_moving_space_inter_region_bitmap_ == nullptr);
+  DCHECK(!region_space_inter_region_bitmap_.IsValid());
+  DCHECK(!non_moving_space_inter_region_bitmap_.IsValid());
   DCHECK(region_space_ != nullptr);
   DCHECK(heap_->non_moving_space_ != nullptr);
   // Region-space
-  region_space_inter_region_bitmap_.reset(accounting::ContinuousSpaceBitmap::Create(
+  region_space_inter_region_bitmap_ = accounting::ContinuousSpaceBitmap::Create(
       "region-space inter region ref bitmap",
       reinterpret_cast<uint8_t*>(region_space_->Begin()),
-      region_space_->Limit() - region_space_->Begin()));
-  CHECK(region_space_inter_region_bitmap_ != nullptr)
+      region_space_->Limit() - region_space_->Begin());
+  CHECK(region_space_inter_region_bitmap_.IsValid())
       << "Couldn't allocate region-space inter region ref bitmap";
 
   // non-moving-space
-  non_moving_space_inter_region_bitmap_.reset(accounting::ContinuousSpaceBitmap::Create(
+  non_moving_space_inter_region_bitmap_ = accounting::ContinuousSpaceBitmap::Create(
       "non-moving-space inter region ref bitmap",
       reinterpret_cast<uint8_t*>(heap_->non_moving_space_->Begin()),
-      heap_->non_moving_space_->Limit() - heap_->non_moving_space_->Begin()));
-  CHECK(non_moving_space_inter_region_bitmap_ != nullptr)
+      heap_->non_moving_space_->Limit() - heap_->non_moving_space_->Begin());
+  CHECK(non_moving_space_inter_region_bitmap_.IsValid())
       << "Couldn't allocate non-moving-space inter region ref bitmap";
 }
 
@@ -1138,9 +1136,9 @@
       // only class object reference, which is either in some immune-space, or
       // in non-moving-space.
       DCHECK(heap_->non_moving_space_->HasAddress(ref));
-      non_moving_space_inter_region_bitmap_->Set(ref);
+      non_moving_space_inter_region_bitmap_.Set(ref);
     } else {
-      region_space_inter_region_bitmap_->Set(ref);
+      region_space_inter_region_bitmap_.Set(ref);
     }
   }
 }
@@ -1459,10 +1457,10 @@
               // We need to process un-evac references as they may be unprocessed,
               // if they skipped the marking phase due to heap mutation.
               ScanDirtyObject</*kNoUnEvac*/ false>(obj);
-              non_moving_space_inter_region_bitmap_->Clear(obj);
+              non_moving_space_inter_region_bitmap_.Clear(obj);
             } else if (region_space_->IsInUnevacFromSpace(obj)) {
               ScanDirtyObject</*kNoUnEvac*/ false>(obj);
-              region_space_inter_region_bitmap_->Clear(obj);
+              region_space_inter_region_bitmap_.Clear(obj);
             }
           },
           accounting::CardTable::kCardAged);
@@ -1474,10 +1472,10 @@
                          ScanDirtyObject</*kNoUnEvac*/ true>(obj);
                        };
         if (space == region_space_) {
-          region_space_->ScanUnevacFromSpace(region_space_inter_region_bitmap_.get(), visitor);
+          region_space_->ScanUnevacFromSpace(&region_space_inter_region_bitmap_, visitor);
         } else {
           DCHECK(space == heap_->non_moving_space_);
-          non_moving_space_inter_region_bitmap_->VisitMarkedRange(
+          non_moving_space_inter_region_bitmap_.VisitMarkedRange(
               reinterpret_cast<uintptr_t>(space->Begin()),
               reinterpret_cast<uintptr_t>(space->End()),
               visitor);
@@ -2584,11 +2582,11 @@
     // inter-region refs
     if (use_generational_cc_ && !young_gen_) {
       // region space
-      add_gc_range(region_space_inter_region_bitmap_->Begin(),
-                   region_space_inter_region_bitmap_->Size());
+      add_gc_range(region_space_inter_region_bitmap_.Begin(),
+                   region_space_inter_region_bitmap_.Size());
       // non-moving space
-      add_gc_range(non_moving_space_inter_region_bitmap_->Begin(),
-                   non_moving_space_inter_region_bitmap_->Size());
+      add_gc_range(non_moving_space_inter_region_bitmap_.Begin(),
+                   non_moving_space_inter_region_bitmap_.Size());
     }
     // Extract RSS using mincore(). Updates the cummulative RSS counter.
     ExtractRssFromMincore(&gc_ranges);
@@ -3639,8 +3637,8 @@
     // We do not currently use the region space cards at all, madvise them away to save ram.
     heap_->GetCardTable()->ClearCardRange(region_space_->Begin(), region_space_->Limit());
   } else if (use_generational_cc_ && !young_gen_) {
-    region_space_inter_region_bitmap_->Clear();
-    non_moving_space_inter_region_bitmap_->Clear();
+    region_space_inter_region_bitmap_.Clear();
+    non_moving_space_inter_region_bitmap_.Clear();
   }
   {
     MutexLock mu(self, skipped_blocks_lock_);
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 2e5752b..0cb5a3e 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -18,6 +18,7 @@
 #define ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_
 
 #include "garbage_collector.h"
+#include "gc/accounting/space_bitmap.h"
 #include "immune_spaces.h"
 #include "offsets.h"
 
@@ -409,8 +410,8 @@
   size_t gc_count_;
   // Bit is set if the corresponding object has inter-region references that
   // were found during the marking phase of two-phase full-heap GC cycle.
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> region_space_inter_region_bitmap_;
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> non_moving_space_inter_region_bitmap_;
+  accounting::ContinuousSpaceBitmap region_space_inter_region_bitmap_;
+  accounting::ContinuousSpaceBitmap non_moving_space_inter_region_bitmap_;
 
   // reclaimed_bytes_ratio = reclaimed_bytes/num_allocated_bytes per GC cycle
   float reclaimed_bytes_ratio_sum_;
diff --git a/runtime/gc/collector/garbage_collector.cc b/runtime/gc/collector/garbage_collector.cc
index 4be768c..bb1a146 100644
--- a/runtime/gc/collector/garbage_collector.cc
+++ b/runtime/gc/collector/garbage_collector.cc
@@ -195,23 +195,14 @@
     if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect ||
         (gc_type == kGcTypeFull &&
          space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect)) {
-      accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
-      accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap();
-      if (live_bitmap != nullptr && live_bitmap != mark_bitmap) {
-        heap_->GetLiveBitmap()->ReplaceBitmap(live_bitmap, mark_bitmap);
-        heap_->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap);
+      if (space->GetLiveBitmap() != nullptr && !space->HasBoundBitmaps()) {
         CHECK(space->IsContinuousMemMapAllocSpace());
         space->AsContinuousMemMapAllocSpace()->SwapBitmaps();
       }
     }
   }
   for (const auto& disc_space : GetHeap()->GetDiscontinuousSpaces()) {
-    space::LargeObjectSpace* space = disc_space->AsLargeObjectSpace();
-    accounting::LargeObjectBitmap* live_set = space->GetLiveBitmap();
-    accounting::LargeObjectBitmap* mark_set = space->GetMarkBitmap();
-    heap_->GetLiveBitmap()->ReplaceLargeObjectBitmap(live_set, mark_set);
-    heap_->GetMarkBitmap()->ReplaceLargeObjectBitmap(mark_set, live_set);
-    space->SwapBitmaps();
+    disc_space->AsLargeObjectSpace()->SwapBitmaps();
   }
 }
 
diff --git a/runtime/gc/collector/immune_spaces.cc b/runtime/gc/collector/immune_spaces.cc
index 3c20e51..84fcc3f 100644
--- a/runtime/gc/collector/immune_spaces.cc
+++ b/runtime/gc/collector/immune_spaces.cc
@@ -107,7 +107,7 @@
 void ImmuneSpaces::AddSpace(space::ContinuousSpace* space) {
   DCHECK(spaces_.find(space) == spaces_.end()) << *space;
   // Bind live to mark bitmap if necessary.
-  if (space->GetLiveBitmap() != space->GetMarkBitmap()) {
+  if (space->GetLiveBitmap() != nullptr && !space->HasBoundBitmaps()) {
     CHECK(space->IsContinuousMemMapAllocSpace());
     space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
   }
diff --git a/runtime/gc/collector/immune_spaces_test.cc b/runtime/gc/collector/immune_spaces_test.cc
index b0d09ba..88f5d4e 100644
--- a/runtime/gc/collector/immune_spaces_test.cc
+++ b/runtime/gc/collector/immune_spaces_test.cc
@@ -41,7 +41,7 @@
 class DummyImageSpace : public space::ImageSpace {
  public:
   DummyImageSpace(MemMap&& map,
-                  std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap,
+                  accounting::ContinuousSpaceBitmap&& live_bitmap,
                   std::unique_ptr<DummyOatFile>&& oat_file,
                   MemMap&& oat_map)
       : ImageSpace("DummyImageSpace",
@@ -68,11 +68,11 @@
     // Create a bunch of dummy bitmaps since these are required to create image spaces. The bitmaps
     // do not need to cover the image spaces though.
     for (size_t i = 0; i < kMaxBitmaps; ++i) {
-      std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap(
+      accounting::ContinuousSpaceBitmap bitmap(
           accounting::ContinuousSpaceBitmap::Create("bitmap",
                                                     reinterpret_cast<uint8_t*>(kPageSize),
                                                     kPageSize));
-      CHECK(bitmap != nullptr);
+      CHECK(bitmap.IsValid());
       live_bitmaps_.push_back(std::move(bitmap));
     }
   }
@@ -96,7 +96,7 @@
       return nullptr;
     }
     CHECK(!live_bitmaps_.empty());
-    std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap(std::move(live_bitmaps_.back()));
+    accounting::ContinuousSpaceBitmap live_bitmap(std::move(live_bitmaps_.back()));
     live_bitmaps_.pop_back();
     MemMap oat_map = MemMap::MapAnonymous("OatMap",
                                           oat_size,
@@ -136,7 +136,7 @@
  private:
   // Bitmap pool for pre-allocated dummy bitmaps. We need to pre-allocate them since we don't want
   // them to randomly get placed somewhere where we want an image space.
-  std::vector<std::unique_ptr<accounting::ContinuousSpaceBitmap>> live_bitmaps_;
+  std::vector<accounting::ContinuousSpaceBitmap> live_bitmaps_;
 };
 
 class DummySpace : public space::ContinuousSpace {
@@ -156,11 +156,11 @@
     return false;
   }
 
-  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const override {
+  accounting::ContinuousSpaceBitmap* GetLiveBitmap() override {
     return nullptr;
   }
 
-  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const override {
+  accounting::ContinuousSpaceBitmap* GetMarkBitmap() override {
     return nullptr;
   }
 };
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 51b0237..063c443 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -3932,9 +3932,8 @@
 void Heap::ClearMarkedObjects() {
   // Clear all of the spaces' mark bitmaps.
   for (const auto& space : GetContinuousSpaces()) {
-    accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap();
-    if (space->GetLiveBitmap() != mark_bitmap) {
-      mark_bitmap->Clear();
+    if (space->GetLiveBitmap() != nullptr && !space->HasBoundBitmaps()) {
+      space->GetMarkBitmap()->Clear();
     }
   }
   // Clear the marked objects in the discontinous space object sets.
diff --git a/runtime/gc/heap_test.cc b/runtime/gc/heap_test.cc
index 5f4621e..817c876 100644
--- a/runtime/gc/heap_test.cc
+++ b/runtime/gc/heap_test.cc
@@ -87,11 +87,11 @@
 TEST_F(HeapTest, HeapBitmapCapacityTest) {
   uint8_t* heap_begin = reinterpret_cast<uint8_t*>(0x1000);
   const size_t heap_capacity = kObjectAlignment * (sizeof(intptr_t) * 8 + 1);
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap(
+  accounting::ContinuousSpaceBitmap bitmap(
       accounting::ContinuousSpaceBitmap::Create("test bitmap", heap_begin, heap_capacity));
   mirror::Object* fake_end_of_heap_object =
       reinterpret_cast<mirror::Object*>(&heap_begin[heap_capacity - kObjectAlignment]);
-  bitmap->Set(fake_end_of_heap_object);
+  bitmap.Set(fake_end_of_heap_object);
 }
 
 TEST_F(HeapTest, DumpGCPerformanceOnShutdown) {
diff --git a/runtime/gc/space/bump_pointer_space.h b/runtime/gc/space/bump_pointer_space.h
index 3e4961a..559fae8 100644
--- a/runtime/gc/space/bump_pointer_space.h
+++ b/runtime/gc/space/bump_pointer_space.h
@@ -96,11 +96,11 @@
     return GetMemMap()->Size();
   }
 
-  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const override {
+  accounting::ContinuousSpaceBitmap* GetLiveBitmap() override {
     return nullptr;
   }
 
-  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const override {
+  accounting::ContinuousSpaceBitmap* GetMarkBitmap() override {
     return nullptr;
   }
 
diff --git a/runtime/gc/space/dlmalloc_space.cc b/runtime/gc/space/dlmalloc_space.cc
index 3ef0f4e..3fa4c3c 100644
--- a/runtime/gc/space/dlmalloc_space.cc
+++ b/runtime/gc/space/dlmalloc_space.cc
@@ -337,8 +337,8 @@
 void DlMallocSpace::Clear() {
   size_t footprint_limit = GetFootprintLimit();
   madvise(GetMemMap()->Begin(), GetMemMap()->Size(), MADV_DONTNEED);
-  live_bitmap_->Clear();
-  mark_bitmap_->Clear();
+  live_bitmap_.Clear();
+  mark_bitmap_.Clear();
   SetEnd(Begin() + starting_size_);
   mspace_ = CreateMspace(mem_map_.Begin(), starting_size_, initial_size_);
   SetFootprintLimit(footprint_limit);
diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc
index 05cc3f7..9c0b025 100644
--- a/runtime/gc/space/image_space.cc
+++ b/runtime/gc/space/image_space.cc
@@ -69,7 +69,7 @@
 ImageSpace::ImageSpace(const std::string& image_filename,
                        const char* image_location,
                        MemMap&& mem_map,
-                       std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap,
+                       accounting::ContinuousSpaceBitmap&& live_bitmap,
                        uint8_t* end)
     : MemMapSpace(image_filename,
                   std::move(mem_map),
@@ -80,7 +80,7 @@
       live_bitmap_(std::move(live_bitmap)),
       oat_file_non_owned_(nullptr),
       image_location_(image_location) {
-  DCHECK(live_bitmap_ != nullptr);
+  DCHECK(live_bitmap_.IsValid());
 }
 
 static int32_t ChooseRelocationOffsetDelta(int32_t min_delta, int32_t max_delta) {
@@ -348,7 +348,7 @@
     CHECK_ALIGNED(current, kObjectAlignment);
     auto* obj = reinterpret_cast<mirror::Object*>(current);
     CHECK(obj->GetClass() != nullptr) << "Image object at address " << obj << " has null class";
-    CHECK(live_bitmap_->Test(obj)) << obj->PrettyTypeOf();
+    CHECK(live_bitmap_.Test(obj)) << obj->PrettyTypeOf();
     if (kUseBakerReadBarrier) {
       obj->AssertReadBarrierState();
     }
@@ -876,17 +876,16 @@
     const ImageSection& image_objects = image_header->GetObjectsSection();
     // We only want the mirror object, not the ArtFields and ArtMethods.
     uint8_t* const image_end = map.Begin() + image_objects.End();
-    std::unique_ptr<accounting::ContinuousSpaceBitmap> bitmap;
+    accounting::ContinuousSpaceBitmap bitmap;
     {
       TimingLogger::ScopedTiming timing("CreateImageBitmap", logger);
-      bitmap.reset(
-          accounting::ContinuousSpaceBitmap::CreateFromMemMap(
-              bitmap_name,
-              std::move(image_bitmap_map),
-              reinterpret_cast<uint8_t*>(map.Begin()),
-              // Make sure the bitmap is aligned to card size instead of just bitmap word size.
-              RoundUp(image_objects.End(), gc::accounting::CardTable::kCardSize)));
-      if (bitmap == nullptr) {
+      bitmap = accounting::ContinuousSpaceBitmap::CreateFromMemMap(
+          bitmap_name,
+          std::move(image_bitmap_map),
+          reinterpret_cast<uint8_t*>(map.Begin()),
+          // Make sure the bitmap is aligned to card size instead of just bitmap word size.
+          RoundUp(image_objects.End(), gc::accounting::CardTable::kCardSize));
+      if (!bitmap.IsValid()) {
         *error_msg = StringPrintf("Could not create bitmap '%s'", bitmap_name.c_str());
         return nullptr;
       }
@@ -1209,7 +1208,7 @@
     if (fixup_image) {
       // Two pass approach, fix up all classes first, then fix up non class-objects.
       // The visited bitmap is used to ensure that pointer arrays are not forwarded twice.
-      std::unique_ptr<gc::accounting::ContinuousSpaceBitmap> visited_bitmap(
+      gc::accounting::ContinuousSpaceBitmap visited_bitmap(
           gc::accounting::ContinuousSpaceBitmap::Create("Relocate bitmap",
                                                         target_base,
                                                         image_header.GetImageSize()));
@@ -1240,7 +1239,7 @@
             if (!app_image_objects.InDest(klass.Ptr())) {
               continue;
             }
-            const bool already_marked = visited_bitmap->Set(klass.Ptr());
+            const bool already_marked = visited_bitmap.Set(klass.Ptr());
             CHECK(!already_marked) << "App image class already visited";
             patch_object_visitor.VisitClass(klass, class_class);
             // Then patch the non-embedded vtable and iftable.
@@ -1248,7 +1247,7 @@
                 klass->GetVTable<kVerifyNone, kWithoutReadBarrier>();
             if (vtable != nullptr &&
                 app_image_objects.InDest(vtable.Ptr()) &&
-                !visited_bitmap->Set(vtable.Ptr())) {
+                !visited_bitmap.Set(vtable.Ptr())) {
               patch_object_visitor.VisitPointerArray(vtable);
             }
             ObjPtr<mirror::IfTable> iftable = klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>();
@@ -1263,7 +1262,7 @@
                   // The iftable has not been patched, so we need to explicitly adjust the pointer.
                   ObjPtr<mirror::PointerArray> ifarray = forward_object(unpatched_ifarray.Ptr());
                   if (app_image_objects.InDest(ifarray.Ptr()) &&
-                      !visited_bitmap->Set(ifarray.Ptr())) {
+                      !visited_bitmap.Set(ifarray.Ptr())) {
                     patch_object_visitor.VisitPointerArray(ifarray);
                   }
                 }
@@ -1280,7 +1279,7 @@
       // Need to update the image to be at the target base.
       uintptr_t objects_begin = reinterpret_cast<uintptr_t>(target_base + objects_section.Offset());
       uintptr_t objects_end = reinterpret_cast<uintptr_t>(target_base + objects_section.End());
-      FixupObjectVisitor<ForwardObject> fixup_object_visitor(visited_bitmap.get(), forward_object);
+      FixupObjectVisitor<ForwardObject> fixup_object_visitor(&visited_bitmap, forward_object);
       bitmap->VisitMarkedRange(objects_begin, objects_end, fixup_object_visitor);
       // Fixup image roots.
       CHECK(app_image_objects.InSource(reinterpret_cast<uintptr_t>(
@@ -1636,7 +1635,7 @@
   static void DoRelocateSpaces(ArrayRef<const std::unique_ptr<ImageSpace>>& spaces,
                                int64_t base_diff64) REQUIRES_SHARED(Locks::mutator_lock_) {
     DCHECK(!spaces.empty());
-    std::unique_ptr<gc::accounting::ContinuousSpaceBitmap> patched_objects(
+    gc::accounting::ContinuousSpaceBitmap patched_objects(
         gc::accounting::ContinuousSpaceBitmap::Create(
             "Marked objects",
             spaces.front()->Begin(),
@@ -1647,7 +1646,7 @@
     DoRelocateSpaces<kPointerSize, /*kExtension=*/ false>(
         spaces.SubArray(/*pos=*/ 0u, base_component_count),
         base_diff64,
-        patched_objects.get());
+        &patched_objects);
 
     for (size_t i = base_component_count, size = spaces.size(); i != size; ) {
       const ImageHeader& ext_header = spaces[i]->GetImageHeader();
@@ -1656,7 +1655,7 @@
       DoRelocateSpaces<kPointerSize, /*kExtension=*/ true>(
           spaces.SubArray(/*pos=*/ i, ext_component_count),
           base_diff64,
-          patched_objects.get());
+          &patched_objects);
       i += ext_component_count;
     }
   }
diff --git a/runtime/gc/space/image_space.h b/runtime/gc/space/image_space.h
index c020dc1..8cee20c 100644
--- a/runtime/gc/space/image_space.h
+++ b/runtime/gc/space/image_space.h
@@ -97,14 +97,14 @@
     return image_location_;
   }
 
-  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const override {
-    return live_bitmap_.get();
+  accounting::ContinuousSpaceBitmap* GetLiveBitmap() override {
+    return &live_bitmap_;
   }
 
-  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const override {
+  accounting::ContinuousSpaceBitmap* GetMarkBitmap() override {
     // ImageSpaces have the same bitmap for both live and marked. This helps reduce the number of
     // special cases to test against.
-    return live_bitmap_.get();
+    return &live_bitmap_;
   }
 
   void Dump(std::ostream& os) const override;
@@ -191,12 +191,12 @@
 
   static Atomic<uint32_t> bitmap_index_;
 
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap_;
+  accounting::ContinuousSpaceBitmap live_bitmap_;
 
   ImageSpace(const std::string& name,
              const char* image_location,
              MemMap&& mem_map,
-             std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap,
+             accounting::ContinuousSpaceBitmap&& live_bitmap,
              uint8_t* end);
 
   // The OatFile associated with the image during early startup to
diff --git a/runtime/gc/space/large_object_space.cc b/runtime/gc/space/large_object_space.cc
index dc63af0..9616ec1 100644
--- a/runtime/gc/space/large_object_space.cc
+++ b/runtime/gc/space/large_object_space.cc
@@ -102,11 +102,11 @@
 };
 
 void LargeObjectSpace::SwapBitmaps() {
-  live_bitmap_.swap(mark_bitmap_);
-  // Swap names to get more descriptive diagnostics.
-  std::string temp_name = live_bitmap_->GetName();
-  live_bitmap_->SetName(mark_bitmap_->GetName());
-  mark_bitmap_->SetName(temp_name);
+  std::swap(live_bitmap_, mark_bitmap_);
+  // Swap the names back so that each bitmap keeps its original, descriptive name.
+  std::string temp_name = live_bitmap_.GetName();
+  live_bitmap_.SetName(mark_bitmap_.GetName());
+  mark_bitmap_.SetName(temp_name);
 }
 
 LargeObjectSpace::LargeObjectSpace(const std::string& name, uint8_t* begin, uint8_t* end,
@@ -119,7 +119,7 @@
 
 
 void LargeObjectSpace::CopyLiveToMarked() {
-  mark_bitmap_->CopyFrom(live_bitmap_.get());
+  mark_bitmap_.CopyFrom(&live_bitmap_);
 }
 
 LargeObjectMapSpace::LargeObjectMapSpace(const std::string& name)
diff --git a/runtime/gc/space/malloc_space.cc b/runtime/gc/space/malloc_space.cc
index 474231b..281d9c2 100644
--- a/runtime/gc/space/malloc_space.cc
+++ b/runtime/gc/space/malloc_space.cc
@@ -63,16 +63,15 @@
     static const uintptr_t kGcCardSize = static_cast<uintptr_t>(accounting::CardTable::kCardSize);
     CHECK_ALIGNED(reinterpret_cast<uintptr_t>(mem_map_.Begin()), kGcCardSize);
     CHECK_ALIGNED(reinterpret_cast<uintptr_t>(mem_map_.End()), kGcCardSize);
-    live_bitmap_.reset(accounting::ContinuousSpaceBitmap::Create(
+    live_bitmap_ = accounting::ContinuousSpaceBitmap::Create(
         StringPrintf("allocspace %s live-bitmap %d", name.c_str(), static_cast<int>(bitmap_index)),
-        Begin(), NonGrowthLimitCapacity()));
-    CHECK(live_bitmap_.get() != nullptr) << "could not create allocspace live bitmap #"
+        Begin(), NonGrowthLimitCapacity());
+    CHECK(live_bitmap_.IsValid()) << "could not create allocspace live bitmap #"
         << bitmap_index;
-    mark_bitmap_.reset(accounting::ContinuousSpaceBitmap::Create(
+    mark_bitmap_ = accounting::ContinuousSpaceBitmap::Create(
         StringPrintf("allocspace %s mark-bitmap %d", name.c_str(), static_cast<int>(bitmap_index)),
-        Begin(), NonGrowthLimitCapacity()));
-    CHECK(mark_bitmap_.get() != nullptr) << "could not create allocspace mark bitmap #"
-        << bitmap_index;
+        Begin(), NonGrowthLimitCapacity());
+    CHECK(mark_bitmap_.IsValid()) << "could not create allocspace mark bitmap #" << bitmap_index;
   }
   for (auto& freed : recent_freed_objects_) {
     freed.first = nullptr;
@@ -229,14 +228,16 @@
                                      growth_limit,
                                      CanMoveObjects());
   SetLimit(End());
-  live_bitmap_->SetHeapLimit(reinterpret_cast<uintptr_t>(End()));
-  CHECK_EQ(live_bitmap_->HeapLimit(), reinterpret_cast<uintptr_t>(End()));
-  mark_bitmap_->SetHeapLimit(reinterpret_cast<uintptr_t>(End()));
-  CHECK_EQ(mark_bitmap_->HeapLimit(), reinterpret_cast<uintptr_t>(End()));
+  live_bitmap_.SetHeapLimit(reinterpret_cast<uintptr_t>(End()));
+  CHECK_EQ(live_bitmap_.HeapLimit(), reinterpret_cast<uintptr_t>(End()));
+  mark_bitmap_.SetHeapLimit(reinterpret_cast<uintptr_t>(End()));
+  CHECK_EQ(mark_bitmap_.HeapLimit(), reinterpret_cast<uintptr_t>(End()));
 
   // Create the actual zygote space.
-  ZygoteSpace* zygote_space = ZygoteSpace::Create("Zygote space", ReleaseMemMap(),
-                                                  live_bitmap_.release(), mark_bitmap_.release());
+  ZygoteSpace* zygote_space = ZygoteSpace::Create("Zygote space",
+                                                  ReleaseMemMap(),
+                                                  std::move(live_bitmap_),
+                                                  std::move(mark_bitmap_));
   if (UNLIKELY(zygote_space == nullptr)) {
     VLOG(heap) << "Failed creating zygote space from space " << GetName();
   } else {
@@ -280,9 +281,9 @@
   CHECK_LE(new_capacity, NonGrowthLimitCapacity());
   GetLiveBitmap()->SetHeapSize(new_capacity);
   GetMarkBitmap()->SetHeapSize(new_capacity);
-  if (temp_bitmap_.get() != nullptr) {
+  if (temp_bitmap_.IsValid()) {
     // If the bitmaps are clamped, then the temp bitmap is actually the mark bitmap.
-    temp_bitmap_->SetHeapSize(new_capacity);
+    temp_bitmap_.SetHeapSize(new_capacity);
   }
   GetMemMap()->SetSize(new_capacity);
   limit_ = Begin() + new_capacity;
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 823043e..4bf5524 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -125,8 +125,8 @@
   for (size_t i = 0; i < num_regions_; ++i, region_addr += kRegionSize) {
     regions_[i].Init(i, region_addr, region_addr + kRegionSize);
   }
-  mark_bitmap_.reset(
-      accounting::ContinuousSpaceBitmap::Create("region space live bitmap", Begin(), Capacity()));
+  mark_bitmap_ =
+      accounting::ContinuousSpaceBitmap::Create("region space live bitmap", Begin(), Capacity());
   if (kIsDebugBuild) {
     CHECK_EQ(regions_[0].Begin(), Begin());
     for (size_t i = 0; i < num_regions_; ++i) {
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index 26af633..6061c25 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -107,11 +107,11 @@
     UNIMPLEMENTED(FATAL);
     return 0;
   }
-  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const override {
-    return mark_bitmap_.get();
+  accounting::ContinuousSpaceBitmap* GetLiveBitmap() override {
+    return &mark_bitmap_;
   }
-  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const override {
-    return mark_bitmap_.get();
+  accounting::ContinuousSpaceBitmap* GetMarkBitmap() override {
+    return &mark_bitmap_;
   }
 
   void Clear() override REQUIRES(!region_lock_);
@@ -756,7 +756,7 @@
   size_t cyclic_alloc_region_index_ GUARDED_BY(region_lock_);
 
   // Mark bitmap used by the GC.
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> mark_bitmap_;
+  accounting::ContinuousSpaceBitmap mark_bitmap_;
 
   DISALLOW_COPY_AND_ASSIGN(RegionSpace);
 };
diff --git a/runtime/gc/space/rosalloc_space.cc b/runtime/gc/space/rosalloc_space.cc
index 36fd864..fc9cad0 100644
--- a/runtime/gc/space/rosalloc_space.cc
+++ b/runtime/gc/space/rosalloc_space.cc
@@ -427,8 +427,8 @@
 void RosAllocSpace::Clear() {
   size_t footprint_limit = GetFootprintLimit();
   madvise(GetMemMap()->Begin(), GetMemMap()->Size(), MADV_DONTNEED);
-  live_bitmap_->Clear();
-  mark_bitmap_->Clear();
+  live_bitmap_.Clear();
+  mark_bitmap_.Clear();
   SetEnd(begin_ + starting_size_);
   delete rosalloc_;
   rosalloc_ = CreateRosAlloc(mem_map_.Begin(),
diff --git a/runtime/gc/space/space.cc b/runtime/gc/space/space.cc
index e7961eb..8ba8603 100644
--- a/runtime/gc/space/space.cc
+++ b/runtime/gc/space/space.cc
@@ -81,12 +81,10 @@
     Space(name, gc_retention_policy) {
   // TODO: Fix this if we ever support objects not in the low 32 bit.
   const size_t capacity = static_cast<size_t>(std::numeric_limits<uint32_t>::max());
-  live_bitmap_.reset(accounting::LargeObjectBitmap::Create("large live objects", nullptr,
-                                                           capacity));
-  CHECK(live_bitmap_.get() != nullptr);
-  mark_bitmap_.reset(accounting::LargeObjectBitmap::Create("large marked objects", nullptr,
-                                                           capacity));
-  CHECK(mark_bitmap_.get() != nullptr);
+  live_bitmap_ = accounting::LargeObjectBitmap::Create("large live objects", nullptr, capacity);
+  CHECK(live_bitmap_.IsValid());
+  mark_bitmap_ = accounting::LargeObjectBitmap::Create("large marked objects", nullptr, capacity);
+  CHECK(mark_bitmap_.IsValid());
 }
 
 collector::ObjectBytePair ContinuousMemMapAllocSpace::Sweep(bool swap_bitmaps) {
@@ -109,35 +107,28 @@
 
 void ContinuousMemMapAllocSpace::BindLiveToMarkBitmap() {
   CHECK(!HasBoundBitmaps());
-  accounting::ContinuousSpaceBitmap* live_bitmap = GetLiveBitmap();
-  if (live_bitmap != mark_bitmap_.get()) {
-    accounting::ContinuousSpaceBitmap* mark_bitmap = mark_bitmap_.release();
-    Runtime::Current()->GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap);
-    temp_bitmap_.reset(mark_bitmap);
-    mark_bitmap_.reset(live_bitmap);
-  }
+  temp_bitmap_ = std::move(mark_bitmap_);
+  mark_bitmap_.CopyView(live_bitmap_);
 }
 
-bool ContinuousMemMapAllocSpace::HasBoundBitmaps() const {
-  return temp_bitmap_.get() != nullptr;
+bool ContinuousSpace::HasBoundBitmaps() {
+  // Bound bitmaps share the same underlying storage, so comparing begin addresses suffices.
+  return GetLiveBitmap()->Begin() == GetMarkBitmap()->Begin();
 }
 
 void ContinuousMemMapAllocSpace::UnBindBitmaps() {
   CHECK(HasBoundBitmaps());
   // At this point, `temp_bitmap_` holds our old mark bitmap.
-  accounting::ContinuousSpaceBitmap* new_bitmap = temp_bitmap_.release();
-  Runtime::Current()->GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap_.get(), new_bitmap);
-  CHECK_EQ(mark_bitmap_.release(), live_bitmap_.get());
-  mark_bitmap_.reset(new_bitmap);
-  DCHECK(temp_bitmap_.get() == nullptr);
+  mark_bitmap_ = std::move(temp_bitmap_);
 }
 
 void ContinuousMemMapAllocSpace::SwapBitmaps() {
-  live_bitmap_.swap(mark_bitmap_);
-  // Swap names to get more descriptive diagnostics.
-  std::string temp_name(live_bitmap_->GetName());
-  live_bitmap_->SetName(mark_bitmap_->GetName());
-  mark_bitmap_->SetName(temp_name);
+  CHECK(!HasBoundBitmaps());
+  std::swap(live_bitmap_, mark_bitmap_);
+  // Swap the names back so that each bitmap keeps its original, descriptive name.
+  std::string temp_name(live_bitmap_.GetName());
+  live_bitmap_.SetName(mark_bitmap_.GetName());
+  mark_bitmap_.SetName(temp_name);
 }
 
 AllocSpace::SweepCallbackContext::SweepCallbackContext(bool swap_bitmaps_in, space::Space* space_in)
diff --git a/runtime/gc/space/space.h b/runtime/gc/space/space.h
index 6a4095c..3b7e3b7 100644
--- a/runtime/gc/space/space.h
+++ b/runtime/gc/space/space.h
@@ -295,8 +295,8 @@
     return End() - Begin();
   }
 
-  virtual accounting::ContinuousSpaceBitmap* GetLiveBitmap() const = 0;
-  virtual accounting::ContinuousSpaceBitmap* GetMarkBitmap() const = 0;
+  virtual accounting::ContinuousSpaceBitmap* GetLiveBitmap() = 0;
+  virtual accounting::ContinuousSpaceBitmap* GetMarkBitmap() = 0;
 
   // Maximum which the mapped space can grow to.
   virtual size_t Capacity() const {
@@ -318,6 +318,8 @@
     return true;
   }
 
+  bool HasBoundBitmaps() REQUIRES(Locks::heap_bitmap_lock_);
+
   virtual ~ContinuousSpace() {}
 
  protected:
@@ -344,12 +346,12 @@
 // is suitable for use for large primitive arrays.
 class DiscontinuousSpace : public Space {
  public:
-  accounting::LargeObjectBitmap* GetLiveBitmap() const {
-    return live_bitmap_.get();
+  accounting::LargeObjectBitmap* GetLiveBitmap() {
+    return &live_bitmap_;
   }
 
-  accounting::LargeObjectBitmap* GetMarkBitmap() const {
-    return mark_bitmap_.get();
+  accounting::LargeObjectBitmap* GetMarkBitmap() {
+    return &mark_bitmap_;
   }
 
   bool IsDiscontinuousSpace() const override {
@@ -361,8 +363,8 @@
  protected:
   DiscontinuousSpace(const std::string& name, GcRetentionPolicy gc_retention_policy);
 
-  std::unique_ptr<accounting::LargeObjectBitmap> live_bitmap_;
-  std::unique_ptr<accounting::LargeObjectBitmap> mark_bitmap_;
+  accounting::LargeObjectBitmap live_bitmap_;
+  accounting::LargeObjectBitmap mark_bitmap_;
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(DiscontinuousSpace);
@@ -423,37 +425,36 @@
     return this;
   }
 
-  bool HasBoundBitmaps() const REQUIRES(Locks::heap_bitmap_lock_);
   // Make the mark bitmap an alias of the live bitmap. Save the current mark bitmap into
   // `temp_bitmap_`, so that we can restore it later in ContinuousMemMapAllocSpace::UnBindBitmaps.
   void BindLiveToMarkBitmap() REQUIRES(Locks::heap_bitmap_lock_);
   // Unalias the mark bitmap from the live bitmap and restore the old mark bitmap.
   void UnBindBitmaps() REQUIRES(Locks::heap_bitmap_lock_);
   // Swap the live and mark bitmaps of this space. This is used by the GC for concurrent sweeping.
-  void SwapBitmaps();
+  void SwapBitmaps() REQUIRES(Locks::heap_bitmap_lock_);
 
   // Clear the space back to an empty space.
   virtual void Clear() = 0;
 
-  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const override {
-    return live_bitmap_.get();
+  accounting::ContinuousSpaceBitmap* GetLiveBitmap() override {
+    return &live_bitmap_;
   }
 
-  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const override {
-    return mark_bitmap_.get();
+  accounting::ContinuousSpaceBitmap* GetMarkBitmap() override {
+    return &mark_bitmap_;
   }
 
-  accounting::ContinuousSpaceBitmap* GetTempBitmap() const {
-    return temp_bitmap_.get();
+  accounting::ContinuousSpaceBitmap* GetTempBitmap() {
+    return &temp_bitmap_;
   }
 
   collector::ObjectBytePair Sweep(bool swap_bitmaps);
   virtual accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() = 0;
 
  protected:
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> live_bitmap_;
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> mark_bitmap_;
-  std::unique_ptr<accounting::ContinuousSpaceBitmap> temp_bitmap_;
+  accounting::ContinuousSpaceBitmap live_bitmap_;
+  accounting::ContinuousSpaceBitmap mark_bitmap_;
+  accounting::ContinuousSpaceBitmap temp_bitmap_;
 
   ContinuousMemMapAllocSpace(const std::string& name,
                              MemMap&& mem_map,
diff --git a/runtime/gc/space/zygote_space.cc b/runtime/gc/space/zygote_space.cc
index e8acc6d..66427a7 100644
--- a/runtime/gc/space/zygote_space.cc
+++ b/runtime/gc/space/zygote_space.cc
@@ -44,20 +44,18 @@
 
 ZygoteSpace* ZygoteSpace::Create(const std::string& name,
                                  MemMap&& mem_map,
-                                 accounting::ContinuousSpaceBitmap* live_bitmap,
-                                 accounting::ContinuousSpaceBitmap* mark_bitmap) {
-  DCHECK(live_bitmap != nullptr);
-  DCHECK(mark_bitmap != nullptr);
+                                 accounting::ContinuousSpaceBitmap&& live_bitmap,
+                                 accounting::ContinuousSpaceBitmap&& mark_bitmap) {
+  DCHECK(live_bitmap.IsValid());
+  DCHECK(mark_bitmap.IsValid());
   size_t objects_allocated = 0;
   CountObjectsAllocated visitor(&objects_allocated);
   ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
-  live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(mem_map.Begin()),
-                                reinterpret_cast<uintptr_t>(mem_map.End()), visitor);
+  live_bitmap.VisitMarkedRange(reinterpret_cast<uintptr_t>(mem_map.Begin()),
+                               reinterpret_cast<uintptr_t>(mem_map.End()), visitor);
   ZygoteSpace* zygote_space = new ZygoteSpace(name, std::move(mem_map), objects_allocated);
-  CHECK(zygote_space->live_bitmap_.get() == nullptr);
-  CHECK(zygote_space->mark_bitmap_.get() == nullptr);
-  zygote_space->live_bitmap_.reset(live_bitmap);
-  zygote_space->mark_bitmap_.reset(mark_bitmap);
+  zygote_space->live_bitmap_ = std::move(live_bitmap);
+  zygote_space->mark_bitmap_ = std::move(mark_bitmap);
   return zygote_space;
 }
 
diff --git a/runtime/gc/space/zygote_space.h b/runtime/gc/space/zygote_space.h
index 1f38cfb..631691d 100644
--- a/runtime/gc/space/zygote_space.h
+++ b/runtime/gc/space/zygote_space.h
@@ -32,8 +32,8 @@
   // Returns the remaining storage in the out_map field.
   static ZygoteSpace* Create(const std::string& name,
                              MemMap&& mem_map,
-                             accounting::ContinuousSpaceBitmap* live_bitmap,
-                             accounting::ContinuousSpaceBitmap* mark_bitmap)
+                             accounting::ContinuousSpaceBitmap&& live_bitmap,
+                             accounting::ContinuousSpaceBitmap&& mark_bitmap)
       REQUIRES_SHARED(Locks::mutator_lock_);
   // In PreZygoteFork() we set mark-bit of all live objects to avoid page
   // getting dirtied due to it.