Move mirror::ArtMethod to native

Optimizing + quick tests are passing; devices boot.

TODO: Test and fix bugs in mips64.

Saves 16 bytes for most ArtMethods, a 7.5MB reduction in system PSS.
Some of the savings come from removing the virtual method and direct
method object arrays.

Bug: 19264997
Change-Id: I622469a0cfa0e7082a2119f3d6a9491eb61e3f3d
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index b53fa84..8f2d94b 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -302,6 +302,18 @@
   end_ = new_arena->End();
 }
 
+bool ArenaAllocator::Contains(const void* ptr) const {
+  if (ptr >= begin_ && ptr < end_) {
+    return true;
+  }
+  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
+    if (cur_arena->Contains(ptr)) {
+      return true;
+    }
+  }
+  return false;
+}
+
 MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                    ssize_t lost_bytes_adjustment)
     : name_(name),
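
The new ArenaAllocator::Contains() checks the active allocation region
first and then falls back to walking the arena list, so it remains
correct after ObtainNewArenaForAllocation() pushes a new arena. A
minimal debug-check sketch (the setup below is illustrative, not part
of this change):

    ArenaPool pool;
    ArenaAllocator allocator(&pool);
    void* data = allocator.Alloc(64, kArenaAllocMisc);
    DCHECK(allocator.Contains(data));     // Allocated from this allocator.
    int local = 0;
    DCHECK(!allocator.Contains(&local));  // Stack memory is not contained.
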
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 2e617b5..d9723b5 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -142,6 +142,11 @@
     return bytes_allocated_;
   }
 
+  // Return true if ptr is contained in the arena.
+  bool Contains(const void* ptr) const {
+    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
+  }
+
  protected:
   size_t bytes_allocated_;
   uint8_t* memory_;
@@ -219,19 +224,52 @@
     return ret;
   }
 
+  // Realloc never frees the input pointer; it is the caller's job to do so if necessary.
+  void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
+                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
+    DCHECK_GE(new_size, ptr_size);
+    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
+    auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
+    // If ptr is the last allocation in the current arena, we can extend it in place.
+    if (end == ptr_) {
+      const size_t size_delta = new_size - ptr_size;
+      // Check the remaining space in the current arena.
+      const size_t remain = end_ - ptr_;
+      if (remain >= size_delta) {
+        ptr_ += size_delta;
+        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
+        return ptr;
+      }
+    }
+    auto* new_ptr = Alloc(new_size, kind);
+    memcpy(new_ptr, ptr, ptr_size);
+    // TODO: Call free on ptr if linear alloc supports free.
+    return new_ptr;
+  }
+
   template <typename T>
   T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
     return static_cast<T*>(Alloc(length * sizeof(T), kind));
   }
 
   void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+
   void ObtainNewArenaForAllocation(size_t allocation_size);
+
   size_t BytesAllocated() const;
+
   MemStats GetMemStats() const;
+
   // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
   // TODO: Change BytesAllocated to this behavior?
   size_t BytesUsed() const;
 
+  ArenaPool* GetArenaPool() const {
+    return pool_;
+  }
+
+  bool Contains(const void* ptr) const;
+
  private:
   static constexpr size_t kAlignment = 8;
 
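The Realloc() fast path only fires when the block being grown is the
most recent allocation in the current arena; any intervening allocation
forces an Alloc() plus memcpy(). A hedged sketch of the intended usage,
reusing the allocator from the sketch above (sizes and the uint32_t
element type are illustrative):

    size_t count = 4;
    uint32_t* data = allocator.AllocArray<uint32_t>(count);
    // No intervening allocation, so this extends in place and returns
    // the same pointer as long as the current arena has room.
    data = reinterpret_cast<uint32_t*>(allocator.Realloc(
        data, count * sizeof(uint32_t), 2 * count * sizeof(uint32_t)));
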
diff --git a/runtime/base/iteration_range.h b/runtime/base/iteration_range.h
index 5a46376..6a0ef1f 100644
--- a/runtime/base/iteration_range.h
+++ b/runtime/base/iteration_range.h
@@ -17,6 +17,8 @@
 #ifndef ART_RUNTIME_BASE_ITERATION_RANGE_H_
 #define ART_RUNTIME_BASE_ITERATION_RANGE_H_
 
+#include <iterator>
+
 namespace art {
 
 // Helper class that acts as a container for range-based loops, given an iteration
@@ -38,10 +40,15 @@
   iterator cend() const { return last_; }
 
  private:
-  iterator first_;
-  iterator last_;
+  const iterator first_;
+  const iterator last_;
 };
 
+template <typename Iter>
+static inline IterationRange<Iter> MakeIterationRange(const Iter& begin_it, const Iter& end_it) {
+  return IterationRange<Iter>(begin_it, end_it);
+}
+
 }  // namespace art
 
 #endif  // ART_RUNTIME_BASE_ITERATION_RANGE_H_
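
MakeIterationRange() deduces the iterator type, so callers can wrap any
iterator pair for a range-based for loop without spelling the type out.
An illustrative example:

    std::vector<int> values = {1, 2, 3, 4};
    // Iterate over the sub-range [begin + 1, end).
    for (int value : MakeIterationRange(values.begin() + 1, values.end())) {
      // ...
    }
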
diff --git a/runtime/base/macros.h b/runtime/base/macros.h
index c00ae78..5c59647 100644
--- a/runtime/base/macros.h
+++ b/runtime/base/macros.h
@@ -50,7 +50,6 @@
 #define ART_FRIEND_TYPED_TEST(test_set_name, individual_test)\
 template<typename T> ART_FRIEND_TEST(test_set_name, individual_test)
 
-
 // DISALLOW_COPY_AND_ASSIGN disallows the copy and operator= functions. It goes in the private:
 // declarations in a class.
 #if !defined(DISALLOW_COPY_AND_ASSIGN)
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index df79085..82db60e 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -20,6 +20,7 @@
 #include <deque>
 #include <queue>
 #include <set>
+#include <unordered_map>
 #include <vector>
 
 #include "arena_containers.h"  // For ArenaAllocatorAdapterKind.
@@ -55,6 +56,10 @@
 using ScopedArenaSafeMap =
     SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
 
+template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>>
+using ScopedArenaUnorderedMap =
+    std::unordered_map<K, V, Hash, KeyEqual, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
+
 // Implementation details below.
 
 template <>
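
ScopedArenaUnorderedMap provides std::unordered_map semantics with
storage taken from a ScopedArenaAllocator, mirroring the existing
ScopedArenaVector and ScopedArenaSafeMap aliases. A hedged usage sketch
(the Adapter() call follows the pattern of the other scoped arena
containers; the bucket-count constructor is used because the
allocator-only overload is C++14):

    ArenaStack stack(&pool);
    ScopedArenaAllocator allocator(&stack);
    ScopedArenaUnorderedMap<uint32_t, const char*> names(
        /*bucket_count*/ 10u, std::hash<uint32_t>(), std::equal_to<uint32_t>(),
        allocator.Adapter());
    names.emplace(0u, "<clinit>");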