Move ArtField to native

Added a linear allocator. Moved ArtField to be a native object. Changed
the image writer to put ArtFields after the mirror section.

Savings:
2 MB on low-RAM devices
4 MB on normal devices

Total PSS measurements before (normal N5, 95s after shell start):
Image size: 7729152 bytes
23112 kB: .NonMoving
23212 kB: .NonMoving
22868 kB: .NonMoving
23072 kB: .NonMoving
22836 kB: .NonMoving
19618 kB: .Zygote
19850 kB: .Zygote
19623 kB: .Zygote
19924 kB: .Zygote
19612 kB: .Zygote
Avg: 42745.4 kB

After:
Image size: 7462912 bytes
17440 kB: .NonMoving
16776 kB: .NonMoving
16804 kB: .NonMoving
17812 kB: .NonMoving
16820 kB: .NonMoving
18788 kB: .Zygote
18856 kB: .Zygote
19064 kB: .Zygote
18841 kB: .Zygote
18629 kB: .Zygote
3499 kB: .LinearAlloc
3408 kB: .LinearAlloc
3424 kB: .LinearAlloc
3600 kB: .LinearAlloc
3436 kB: .LinearAlloc
Avg: 39439.4 kB

No reflection performance changes.

Bug: 19264997
Bug: 17643507

Change-Id: I10c73a37913332080aeb978c7c94713bdfe4fe1c
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index dd29404..59d38ad 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -132,11 +132,10 @@
   free(reinterpret_cast<void*>(memory_));
 }
 
-MemMapArena::MemMapArena(size_t size) {
+MemMapArena::MemMapArena(size_t size, bool low_4gb) {
   std::string error_msg;
-  map_.reset(
-      MemMap::MapAnonymous("dalvik-LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, false,
-                           false, &error_msg));
+  map_.reset(MemMap::MapAnonymous(
+      "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
   CHECK(map_.get() != nullptr) << error_msg;
   memory_ = map_->Begin();
   size_ = map_->Size();
@@ -156,8 +155,12 @@
   }
 }
 
-ArenaPool::ArenaPool(bool use_malloc)
-    : use_malloc_(use_malloc), lock_("Arena pool lock"), free_arenas_(nullptr) {
+ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
+    : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
+      low_4gb_(low_4gb) {
+  if (low_4gb) {
+    CHECK(!use_malloc) << "low4gb must use map implementation";
+  }
   if (!use_malloc) {
     MemMap::Init();
   }
@@ -182,7 +185,8 @@
     }
   }
   if (ret == nullptr) {
-    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) : new MemMapArena(size);
+    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
+        new MemMapArena(size, low_4gb_);
   }
   ret->Reset();
   return ret;
@@ -229,6 +233,17 @@
   return ArenaAllocatorStats::BytesAllocated();
 }
 
+size_t ArenaAllocator::BytesUsed() const {
+  size_t total = ptr_ - begin_;
+  if (arena_head_ != nullptr) {
+    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
+         cur_arena = cur_arena->next_) {
+     total += cur_arena->GetBytesAllocated();
+    }
+  }
+  return total;
+}
+
 ArenaAllocator::ArenaAllocator(ArenaPool* pool)
   : pool_(pool),
     begin_(nullptr),
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index cc7b856..3a86b61 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -165,7 +165,7 @@
 
 class MemMapArena FINAL : public Arena {
  public:
-  explicit MemMapArena(size_t size = Arena::kDefaultSize);
+  explicit MemMapArena(size_t size, bool low_4gb);
   virtual ~MemMapArena() { }
   void Release() OVERRIDE;
 
@@ -175,7 +175,7 @@
 
 class ArenaPool {
  public:
-  explicit ArenaPool(bool use_malloc = true);
+  explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
   ~ArenaPool();
   Arena* AllocArena(size_t size) LOCKS_EXCLUDED(lock_);
   void FreeArenaChain(Arena* first) LOCKS_EXCLUDED(lock_);
@@ -188,6 +188,7 @@
   const bool use_malloc_;
   mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
   Arena* free_arenas_ GUARDED_BY(lock_);
+  const bool low_4gb_;
   DISALLOW_COPY_AND_ASSIGN(ArenaPool);
 };
 
@@ -227,6 +228,9 @@
   void ObtainNewArenaForAllocation(size_t allocation_size);
   size_t BytesAllocated() const;
   MemStats GetMemStats() const;
+  // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
+  // TODO: Change BytesAllocated to this behavior?
+  size_t BytesUsed() const;
 
  private:
   static constexpr size_t kAlignment = 8;
diff --git a/runtime/base/mutex.h b/runtime/base/mutex.h
index af00834..6e4b96c 100644
--- a/runtime/base/mutex.h
+++ b/runtime/base/mutex.h
@@ -73,6 +73,7 @@
   kRosAllocBulkFreeLock,
   kAllocSpaceLock,
   kBumpPointerSpaceBlockLock,
+  kArenaPoolLock,
   kDexFileMethodInlinerLock,
   kDexFileToMethodInlinerMapLock,
   kMarkSweepMarkStackLock,