Move arenas into runtime

Moved the arena allocators and the arena pool into the runtime.

Motivation:
Allow the GC to use arena allocators and recycle the arena pool for linear alloc.

Bug: 19264997
Change-Id: I8ddbb6d55ee923a980b28fb656c758c5d7697c2f
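
With the pool in the runtime, runtime code can construct arena allocators directly. A minimal usage sketch, not part of the patch (UseArenas() and its pool parameter are illustrative; in practice the pool would be owned by the runtime):

    #include "base/arena_allocator.h"

    void UseArenas(art::ArenaPool* pool) {
      art::ArenaAllocator allocator(pool);
      // Alloc()/AllocArray() return zeroed memory, obtaining a new arena from
      // the pool whenever the current one is exhausted.
      int32_t* array = allocator.AllocArray<int32_t>(64, art::kArenaAllocMisc);
      array[0] = 1;
      // ~ArenaAllocator() hands the whole arena chain back to the pool for reuse.
    }
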
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 4714610..c647cc2 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -22,10 +22,12 @@
   atomic.cc.arm \
   barrier.cc \
   base/allocator.cc \
+  base/arena_allocator.cc \
   base/bit_vector.cc \
   base/hex_dump.cc \
   base/logging.cc \
   base/mutex.cc \
+  base/scoped_arena_allocator.cc \
   base/scoped_flock.cc \
   base/stringpiece.cc \
   base/stringprintf.cc \
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
new file mode 100644
index 0000000..b3f812e
--- /dev/null
+++ b/runtime/base/arena_allocator.cc
@@ -0,0 +1,296 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <algorithm>
+#include <iomanip>
+#include <numeric>
+
+#include "arena_allocator.h"
+#include "logging.h"
+#include "mutex.h"
+#include "thread-inl.h"
+#include <memcheck/memcheck.h>
+
+namespace art {
+
+// MemMap is a bit slower than malloc according to my measurements.
+static constexpr bool kUseMemMap = false;
+static constexpr bool kUseMemSet = true && kUseMemMap;
+static constexpr size_t kValgrindRedZoneBytes = 8;
+constexpr size_t Arena::kDefaultSize;
+
+template <bool kCount>
+const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
+  "Misc       ",
+  "BasicBlock ",
+  "BBList     "
+  "BBPreds    ",
+  "DfsPreOrd  ",
+  "DfsPostOrd ",
+  "DomPostOrd ",
+  "TopoOrd    ",
+  "Lowering   ",
+  "LIR        ",
+  "LIR masks  ",
+  "SwitchTbl  ",
+  "FillArray  ",
+  "SlowPaths  ",
+  "MIR        ",
+  "DataFlow   ",
+  "GrowList   ",
+  "GrowBitMap ",
+  "SSA2Dalvik ",
+  "Dalvik2SSA ",
+  "DebugInfo  ",
+  "Successor  ",
+  "RegAlloc   ",
+  "Data       ",
+  "Preds      ",
+  "STL        ",
+};
+
+template <bool kCount>
+ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
+    : num_allocations_(0u) {
+  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
+}
+
+template <bool kCount>
+void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
+  num_allocations_ = other.num_allocations_;
+  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
+}
+
+template <bool kCount>
+void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
+  alloc_stats_[kind] += bytes;
+  ++num_allocations_;
+}
+
+template <bool kCount>
+size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
+  return num_allocations_;
+}
+
+template <bool kCount>
+size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
+  const size_t init = 0u;  // Initial value of the correct type.
+  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
+}
+
+template <bool kCount>
+void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
+                                           ssize_t lost_bytes_adjustment) const {
+  size_t malloc_bytes = 0u;
+  size_t lost_bytes = 0u;
+  size_t num_arenas = 0u;
+  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
+    malloc_bytes += arena->Size();
+    lost_bytes += arena->RemainingSpace();
+    ++num_arenas;
+  }
+  // The lost_bytes_adjustment is used to make up for the fact that the current arena
+  // may not have the bytes_allocated_ updated correctly.
+  lost_bytes += lost_bytes_adjustment;
+  const size_t bytes_allocated = BytesAllocated();
+  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
+     << ", lost: " << lost_bytes << "\n";
+  size_t num_allocations = NumAllocations();
+  if (num_allocations != 0) {
+    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
+       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
+  }
+  os << "===== Allocation by kind\n";
+  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
+  for (size_t i = 0; i < kNumArenaAllocKinds; i++) {
+    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
+  }
+}
+
+// Explicitly instantiate the used implementation.
+template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;
+
+Arena::Arena(size_t size)
+    : bytes_allocated_(0),
+      map_(nullptr),
+      next_(nullptr) {
+  if (kUseMemMap) {
+    std::string error_msg;
+    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
+                                &error_msg);
+    CHECK(map_ != nullptr) << error_msg;
+    memory_ = map_->Begin();
+    size_ = map_->Size();
+  } else {
+    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
+    size_ = size;
+  }
+}
+
+Arena::~Arena() {
+  if (kUseMemMap) {
+    delete map_;
+  } else {
+    free(reinterpret_cast<void*>(memory_));
+  }
+}
+
+void Arena::Reset() {
+  if (bytes_allocated_) {
+    if (kUseMemSet || !kUseMemMap) {
+      memset(Begin(), 0, bytes_allocated_);
+    } else {
+      map_->MadviseDontNeedAndZero();
+    }
+    bytes_allocated_ = 0;
+  }
+}
+
+ArenaPool::ArenaPool()
+    : lock_("Arena pool lock"),
+      free_arenas_(nullptr) {
+}
+
+ArenaPool::~ArenaPool() {
+  while (free_arenas_ != nullptr) {
+    auto* arena = free_arenas_;
+    free_arenas_ = free_arenas_->next_;
+    delete arena;
+  }
+}
+
+Arena* ArenaPool::AllocArena(size_t size) {
+  Thread* self = Thread::Current();
+  Arena* ret = nullptr;
+  {
+    MutexLock lock(self, lock_);
+    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
+      ret = free_arenas_;
+      free_arenas_ = free_arenas_->next_;
+    }
+  }
+  if (ret == nullptr) {
+    ret = new Arena(size);
+  }
+  ret->Reset();
+  return ret;
+}
+
+size_t ArenaPool::GetBytesAllocated() const {
+  size_t total = 0;
+  MutexLock lock(Thread::Current(), lock_);
+  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
+    total += arena->GetBytesAllocated();
+  }
+  return total;
+}
+
+void ArenaPool::FreeArenaChain(Arena* first) {
+  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
+    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
+      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
+    }
+  }
+  if (first != nullptr) {
+    Arena* last = first;
+    while (last->next_ != nullptr) {
+      last = last->next_;
+    }
+    Thread* self = Thread::Current();
+    MutexLock lock(self, lock_);
+    last->next_ = free_arenas_;
+    free_arenas_ = first;
+  }
+}
+
+size_t ArenaAllocator::BytesAllocated() const {
+  return ArenaAllocatorStats::BytesAllocated();
+}
+
+ArenaAllocator::ArenaAllocator(ArenaPool* pool)
+  : pool_(pool),
+    begin_(nullptr),
+    end_(nullptr),
+    ptr_(nullptr),
+    arena_head_(nullptr),
+    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
+}
+
+void ArenaAllocator::UpdateBytesAllocated() {
+  if (arena_head_ != nullptr) {
+    // Update how many bytes we have allocated into the arena so that the arena pool knows how
+    // much memory to zero out.
+    arena_head_->bytes_allocated_ = ptr_ - begin_;
+  }
+}
+
+void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
+  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
+    // Obtain a new block.
+    ObtainNewArenaForAllocation(rounded_bytes);
+    if (UNLIKELY(ptr_ == nullptr)) {
+      return nullptr;
+    }
+  }
+  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
+  uint8_t* ret = ptr_;
+  ptr_ += rounded_bytes;
+  // Check that the memory is already zeroed out.
+  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
+    CHECK_EQ(*ptr, 0U);
+  }
+  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
+  return ret;
+}
+
+ArenaAllocator::~ArenaAllocator() {
+  // Reclaim all the arenas by giving them back to the arena pool.
+  UpdateBytesAllocated();
+  pool_->FreeArenaChain(arena_head_);
+}
+
+void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
+  UpdateBytesAllocated();
+  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
+  new_arena->next_ = arena_head_;
+  arena_head_ = new_arena;
+  // Update our internal data structures.
+  ptr_ = begin_ = new_arena->Begin();
+  end_ = new_arena->End();
+}
+
+MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
+                   ssize_t lost_bytes_adjustment)
+    : name_(name),
+      stats_(stats),
+      first_arena_(first_arena),
+      lost_bytes_adjustment_(lost_bytes_adjustment) {
+}
+
+void MemStats::Dump(std::ostream& os) const {
+  os << name_ << " stats:\n";
+  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
+}
+
+// Dump memory usage stats.
+MemStats ArenaAllocator::GetMemStats() const {
+  ssize_t lost_bytes_adjustment =
+      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
+  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
+}
+
+}  // namespace art
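
A sketch of the recycling behavior implemented above (a standalone illustration; the allocator names are hypothetical): FreeArenaChain() pushes a finished allocator's arenas onto free_arenas_, and a later AllocArena() pops one and Reset()s it instead of mapping or mallocing fresh memory.

    void RecycleExample() {
      art::ArenaPool pool;
      {
        art::ArenaAllocator compiler_allocator(&pool);
        compiler_allocator.Alloc(4096);
      }  // ~ArenaAllocator() calls FreeArenaChain(); the arena goes on the free list.
      {
        art::ArenaAllocator gc_allocator(&pool);
        gc_allocator.Alloc(4096);  // AllocArena() reuses the re-zeroed arena.
      }
    }
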
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
new file mode 100644
index 0000000..9237391
--- /dev/null
+++ b/runtime/base/arena_allocator.h
@@ -0,0 +1,240 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
+#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
+
+#include <stdint.h>
+#include <stddef.h>
+
+#include "debug_stack.h"
+#include "macros.h"
+#include "mem_map.h"
+#include "mutex.h"
+#include "utils.h"
+
+namespace art {
+
+class Arena;
+class ArenaPool;
+class ArenaAllocator;
+class ArenaStack;
+class ScopedArenaAllocator;
+class MemStats;
+
+template <typename T>
+class ArenaAllocatorAdapter;
+
+static constexpr bool kArenaAllocatorCountAllocations = false;
+
+// Type of allocation for memory tuning.
+enum ArenaAllocKind {
+  kArenaAllocMisc,
+  kArenaAllocBB,
+  kArenaAllocBBList,
+  kArenaAllocBBPredecessors,
+  kArenaAllocDfsPreOrder,
+  kArenaAllocDfsPostOrder,
+  kArenaAllocDomPostOrder,
+  kArenaAllocTopologicalSortOrder,
+  kArenaAllocLoweringInfo,
+  kArenaAllocLIR,
+  kArenaAllocLIRResourceMask,
+  kArenaAllocSwitchTable,
+  kArenaAllocFillArrayData,
+  kArenaAllocSlowPaths,
+  kArenaAllocMIR,
+  kArenaAllocDFInfo,
+  kArenaAllocGrowableArray,
+  kArenaAllocGrowableBitMap,
+  kArenaAllocSSAToDalvikMap,
+  kArenaAllocDalvikToSSAMap,
+  kArenaAllocDebugInfo,
+  kArenaAllocSuccessor,
+  kArenaAllocRegAlloc,
+  kArenaAllocData,
+  kArenaAllocPredecessors,
+  kArenaAllocSTL,
+  kNumArenaAllocKinds
+};
+
+template <bool kCount>
+class ArenaAllocatorStatsImpl;
+
+template <>
+class ArenaAllocatorStatsImpl<false> {
+ public:
+  ArenaAllocatorStatsImpl() = default;
+  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
+  ArenaAllocatorStatsImpl& operator=(const ArenaAllocatorStatsImpl& other) = delete;
+
+  void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
+  void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
+  size_t NumAllocations() const { return 0u; }
+  size_t BytesAllocated() const { return 0u; }
+  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
+    UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
+  }
+};
+
+template <bool kCount>
+class ArenaAllocatorStatsImpl {
+ public:
+  ArenaAllocatorStatsImpl();
+  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
+  ArenaAllocatorStatsImpl& operator=(const ArenaAllocatorStatsImpl& other) = delete;
+
+  void Copy(const ArenaAllocatorStatsImpl& other);
+  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
+  size_t NumAllocations() const;
+  size_t BytesAllocated() const;
+  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
+
+ private:
+  size_t num_allocations_;
+  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
+  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.
+
+  static const char* const kAllocNames[];
+};
+
+typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
+
+class Arena {
+ public:
+  static constexpr size_t kDefaultSize = 128 * KB;
+  explicit Arena(size_t size = kDefaultSize);
+  ~Arena();
+  void Reset();
+  uint8_t* Begin() {
+    return memory_;
+  }
+
+  uint8_t* End() {
+    return memory_ + size_;
+  }
+
+  size_t Size() const {
+    return size_;
+  }
+
+  size_t RemainingSpace() const {
+    return Size() - bytes_allocated_;
+  }
+
+  size_t GetBytesAllocated() const {
+    return bytes_allocated_;
+  }
+
+ private:
+  size_t bytes_allocated_;
+  uint8_t* memory_;
+  size_t size_;
+  MemMap* map_;
+  Arena* next_;
+  friend class ArenaPool;
+  friend class ArenaAllocator;
+  friend class ArenaStack;
+  friend class ScopedArenaAllocator;
+  template <bool kCount> friend class ArenaAllocatorStatsImpl;
+  DISALLOW_COPY_AND_ASSIGN(Arena);
+};
+
+class ArenaPool {
+ public:
+  ArenaPool();
+  ~ArenaPool();
+  Arena* AllocArena(size_t size) LOCKS_EXCLUDED(lock_);
+  void FreeArenaChain(Arena* first) LOCKS_EXCLUDED(lock_);
+  size_t GetBytesAllocated() const LOCKS_EXCLUDED(lock_);
+
+ private:
+  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
+  Arena* free_arenas_ GUARDED_BY(lock_);
+  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
+};
+
+class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
+ public:
+  explicit ArenaAllocator(ArenaPool* pool);
+  ~ArenaAllocator();
+
+  // Get adapter for use in STL containers. See arena_containers.h.
+  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
+
+  // Returns zeroed memory.
+  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
+    if (UNLIKELY(running_on_valgrind_)) {
+      return AllocValgrind(bytes, kind);
+    }
+    bytes = RoundUp(bytes, kAlignment);
+    if (UNLIKELY(ptr_ + bytes > end_)) {
+      // Obtain a new block.
+      ObtainNewArenaForAllocation(bytes);
+      if (UNLIKELY(ptr_ == nullptr)) {
+        return nullptr;
+      }
+    }
+    ArenaAllocatorStats::RecordAlloc(bytes, kind);
+    uint8_t* ret = ptr_;
+    ptr_ += bytes;
+    return ret;
+  }
+
+  template <typename T>
+  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
+    return static_cast<T*>(Alloc(length * sizeof(T), kind));
+  }
+
+  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+  void ObtainNewArenaForAllocation(size_t allocation_size);
+  size_t BytesAllocated() const;
+  MemStats GetMemStats() const;
+
+ private:
+  static constexpr size_t kAlignment = 8;
+
+  void UpdateBytesAllocated();
+
+  ArenaPool* pool_;
+  uint8_t* begin_;
+  uint8_t* end_;
+  uint8_t* ptr_;
+  Arena* arena_head_;
+  bool running_on_valgrind_;
+
+  template <typename U>
+  friend class ArenaAllocatorAdapter;
+
+  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
+};  // ArenaAllocator
+
+class MemStats {
+ public:
+  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
+           ssize_t lost_bytes_adjustment = 0);
+  void Dump(std::ostream& os) const;
+
+ private:
+  const char* const name_;
+  const ArenaAllocatorStats* const stats_;
+  const Arena* const first_arena_;
+  const ssize_t lost_bytes_adjustment_;
+};  // MemStats
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
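
Note that the per-kind breakdown is only recorded when kArenaAllocatorCountAllocations above is flipped to true; with the default false, the ArenaAllocatorStats methods compile to no-ops. A sketch of dumping the stats (DumpAllocStats() is a hypothetical helper):

    void DumpAllocStats(const art::ArenaAllocator& allocator, std::ostream& os) {
      art::MemStats stats = allocator.GetMemStats();
      // With counting enabled, prints used/allocated/lost bytes and the per-kind table.
      stats.Dump(os);
    }
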
diff --git a/runtime/base/arena_containers.h b/runtime/base/arena_containers.h
new file mode 100644
index 0000000..162eb16
--- /dev/null
+++ b/runtime/base/arena_containers.h
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_ARENA_CONTAINERS_H_
+#define ART_RUNTIME_BASE_ARENA_CONTAINERS_H_
+
+#include <deque>
+#include <queue>
+#include <set>
+#include <vector>
+
+#include "arena_allocator.h"
+#include "safe_map.h"
+
+namespace art {
+
+// Adapter for use of ArenaAllocator in STL containers.
+// Use ArenaAllocator::Adapter() to create an adapter to pass to container constructors.
+// For example,
+//   struct Foo {
+//     explicit Foo(ArenaAllocator* allocator)
+//         : foo_vector(allocator->Adapter(kArenaAllocMisc)),
+//           foo_map(std::less<int>(), allocator->Adapter()) {
+//     }
+//     ArenaVector<int> foo_vector;
+//     ArenaSafeMap<int, int> foo_map;
+//   };
+template <typename T>
+class ArenaAllocatorAdapter;
+
+template <typename T>
+using ArenaDeque = std::deque<T, ArenaAllocatorAdapter<T>>;
+
+template <typename T>
+using ArenaQueue = std::queue<T, ArenaDeque<T>>;
+
+template <typename T>
+using ArenaVector = std::vector<T, ArenaAllocatorAdapter<T>>;
+
+template <typename T, typename Comparator = std::less<T>>
+using ArenaSet = std::set<T, Comparator, ArenaAllocatorAdapter<T>>;
+
+template <typename K, typename V, typename Comparator = std::less<K>>
+using ArenaSafeMap =
+    SafeMap<K, V, Comparator, ArenaAllocatorAdapter<std::pair<const K, V>>>;
+
+// Implementation details below.
+
+template <bool kCount>
+class ArenaAllocatorAdapterKindImpl;
+
+template <>
+class ArenaAllocatorAdapterKindImpl<false> {
+ public:
+  // Not tracking allocations; ignore the supplied kind and arbitrarily provide kArenaAllocSTL.
+  explicit ArenaAllocatorAdapterKindImpl(ArenaAllocKind kind) { UNUSED(kind); }
+  ArenaAllocatorAdapterKindImpl& operator=(const ArenaAllocatorAdapterKindImpl& other) = default;
+  ArenaAllocKind Kind() { return kArenaAllocSTL; }
+};
+
+template <bool kCount>
+class ArenaAllocatorAdapterKindImpl {
+ public:
+  explicit ArenaAllocatorAdapterKindImpl(ArenaAllocKind kind) : kind_(kind) { }
+  ArenaAllocatorAdapterKindImpl& operator=(const ArenaAllocatorAdapterKindImpl& other) = default;
+  ArenaAllocKind Kind() { return kind_; }
+
+ private:
+  ArenaAllocKind kind_;
+};
+
+typedef ArenaAllocatorAdapterKindImpl<kArenaAllocatorCountAllocations> ArenaAllocatorAdapterKind;
+
+template <>
+class ArenaAllocatorAdapter<void>
+    : private DebugStackReference, private ArenaAllocatorAdapterKind {
+ public:
+  typedef void value_type;
+  typedef void* pointer;
+  typedef const void* const_pointer;
+
+  template <typename U>
+  struct rebind {
+    typedef ArenaAllocatorAdapter<U> other;
+  };
+
+  explicit ArenaAllocatorAdapter(ArenaAllocator* arena_allocator,
+                                 ArenaAllocKind kind = kArenaAllocSTL)
+      : DebugStackReference(arena_allocator),
+        ArenaAllocatorAdapterKind(kind),
+        arena_allocator_(arena_allocator) {
+  }
+  template <typename U>
+  ArenaAllocatorAdapter(const ArenaAllocatorAdapter<U>& other)
+      : DebugStackReference(other),
+        ArenaAllocatorAdapterKind(other),
+        arena_allocator_(other.arena_allocator_) {
+  }
+  ArenaAllocatorAdapter(const ArenaAllocatorAdapter& other) = default;
+  ArenaAllocatorAdapter& operator=(const ArenaAllocatorAdapter& other) = default;
+  ~ArenaAllocatorAdapter() = default;
+
+ private:
+  ArenaAllocator* arena_allocator_;
+
+  template <typename U>
+  friend class ArenaAllocatorAdapter;
+};
+
+template <typename T>
+class ArenaAllocatorAdapter : private DebugStackReference, private ArenaAllocatorAdapterKind {
+ public:
+  typedef T value_type;
+  typedef T* pointer;
+  typedef T& reference;
+  typedef const T* const_pointer;
+  typedef const T& const_reference;
+  typedef size_t size_type;
+  typedef ptrdiff_t difference_type;
+
+  template <typename U>
+  struct rebind {
+    typedef ArenaAllocatorAdapter<U> other;
+  };
+
+  explicit ArenaAllocatorAdapter(ArenaAllocator* arena_allocator, ArenaAllocKind kind)
+      : DebugStackReference(arena_allocator),
+        ArenaAllocatorAdapterKind(kind),
+        arena_allocator_(arena_allocator) {
+  }
+  template <typename U>
+  ArenaAllocatorAdapter(const ArenaAllocatorAdapter<U>& other)
+      : DebugStackReference(other),
+        ArenaAllocatorAdapterKind(other),
+        arena_allocator_(other.arena_allocator_) {
+  }
+  ArenaAllocatorAdapter(const ArenaAllocatorAdapter& other) = default;
+  ArenaAllocatorAdapter& operator=(const ArenaAllocatorAdapter& other) = default;
+  ~ArenaAllocatorAdapter() = default;
+
+  size_type max_size() const {
+    return static_cast<size_type>(-1) / sizeof(T);
+  }
+
+  pointer address(reference x) const { return &x; }
+  const_pointer address(const_reference x) const { return &x; }
+
+  pointer allocate(size_type n, ArenaAllocatorAdapter<void>::pointer hint = nullptr) {
+    UNUSED(hint);
+    DCHECK_LE(n, max_size());
+    return arena_allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind());
+  }
+  void deallocate(pointer p, size_type n) {
+    UNUSED(p, n);
+  }
+
+  void construct(pointer p, const_reference val) {
+    new (static_cast<void*>(p)) value_type(val);
+  }
+  void destroy(pointer p) {
+    p->~value_type();
+  }
+
+ private:
+  ArenaAllocator* arena_allocator_;
+
+  template <typename U>
+  friend class ArenaAllocatorAdapter;
+
+  template <typename U>
+  friend bool operator==(const ArenaAllocatorAdapter<U>& lhs,
+                         const ArenaAllocatorAdapter<U>& rhs);
+};
+
+template <typename T>
+inline bool operator==(const ArenaAllocatorAdapter<T>& lhs,
+                       const ArenaAllocatorAdapter<T>& rhs) {
+  return lhs.arena_allocator_ == rhs.arena_allocator_;
+}
+
+template <typename T>
+inline bool operator!=(const ArenaAllocatorAdapter<T>& lhs,
+                       const ArenaAllocatorAdapter<T>& rhs) {
+  return !(lhs == rhs);
+}
+
+inline ArenaAllocatorAdapter<void> ArenaAllocator::Adapter(ArenaAllocKind kind) {
+  return ArenaAllocatorAdapter<void>(this, kind);
+}
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_ARENA_CONTAINERS_H_
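
Usage follows the comment at the top of this header; a minimal sketch with illustrative names:

    void CollectIds(art::ArenaAllocator* allocator) {
      art::ArenaVector<uint32_t> ids(allocator->Adapter(art::kArenaAllocGrowableArray));
      ids.push_back(7u);
      // deallocate() is a no-op: memory is reclaimed only when the allocator's
      // arenas go back to the pool, so don't keep such containers alive past
      // the ArenaAllocator itself.
    }
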
diff --git a/runtime/base/arena_object.h b/runtime/base/arena_object.h
new file mode 100644
index 0000000..ab97d0c
--- /dev/null
+++ b/runtime/base/arena_object.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_ARENA_OBJECT_H_
+#define ART_RUNTIME_BASE_ARENA_OBJECT_H_
+
+#include "base/arena_allocator.h"
+#include "base/logging.h"
+#include "scoped_arena_allocator.h"
+
+namespace art {
+
+// Parent for arena allocated objects giving appropriate new and delete operators.
+template<enum ArenaAllocKind kAllocKind>
+class ArenaObject {
+ public:
+  // Allocate a new ArenaObject of 'size' bytes in the Arena.
+  void* operator new(size_t size, ArenaAllocator* allocator) {
+    return allocator->Alloc(size, kAllocKind);
+  }
+
+  static void* operator new(size_t size, ScopedArenaAllocator* arena) {
+    return arena->Alloc(size, kAllocKind);
+  }
+
+  void operator delete(void*, size_t) {
+    LOG(FATAL) << "UNREACHABLE";
+    UNREACHABLE();
+  }
+};
+
+
+// Parent for arena allocated objects that get deleted, gives appropriate new and delete operators.
+// Currently this is used by the quick compiler for debug reference counting arena allocations.
+template<enum ArenaAllocKind kAllocKind>
+class DeletableArenaObject {
+ public:
+  // Allocate a new DeletableArenaObject of 'size' bytes in the Arena.
+  void* operator new(size_t size, ArenaAllocator* allocator) {
+    return allocator->Alloc(size, kAllocKind);
+  }
+
+  static void* operator new(size_t size, ScopedArenaAllocator* arena) {
+    return arena->Alloc(size, kAllocKind);
+  }
+
+  void operator delete(void*, size_t) {
+    // Nop.
+  }
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_ARENA_OBJECT_H_
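
A sketch of the intended pattern (Node is a hypothetical arena-allocated class):

    class Node : public art::ArenaObject<art::kArenaAllocMisc> {
     public:
      explicit Node(uint32_t id) : id_(id) { }
      uint32_t GetId() const { return id_; }
     private:
      const uint32_t id_;
    };

    Node* MakeNode(art::ArenaAllocator* allocator) {
      // Placement new resolves to ArenaObject::operator new; a delete-expression
      // on the result would hit LOG(FATAL), since arena memory is freed wholesale.
      return new (allocator) Node(1u);
    }
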
diff --git a/runtime/base/debug_stack.h b/runtime/base/debug_stack.h
new file mode 100644
index 0000000..03f4575
--- /dev/null
+++ b/runtime/base/debug_stack.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_DEBUG_STACK_H_
+#define ART_RUNTIME_BASE_DEBUG_STACK_H_
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "globals.h"
+
+namespace art {
+
+// Helper classes for reference counting to enforce construction/destruction order and
+// usage of the top element of a stack in debug mode with no overhead in release mode.
+
+// Reference counter. No references allowed in destructor or in explicitly called CheckNoRefs().
+template <bool kIsDebug>
+class DebugStackRefCounterImpl;
+// Reference. Allows an explicit check that it's the top reference.
+template <bool kIsDebug>
+class DebugStackReferenceImpl;
+// Indirect top reference. Checks that the reference is the top reference when used.
+template <bool kIsDebug>
+class DebugStackIndirectTopRefImpl;
+
+typedef DebugStackRefCounterImpl<kIsDebugBuild> DebugStackRefCounter;
+typedef DebugStackReferenceImpl<kIsDebugBuild> DebugStackReference;
+typedef DebugStackIndirectTopRefImpl<kIsDebugBuild> DebugStackIndirectTopRef;
+
+// Non-debug mode specializations. These should be optimized away.
+
+template <>
+class DebugStackRefCounterImpl<false> {
+ public:
+  size_t IncrementRefCount() { return 0u; }
+  void DecrementRefCount() { }
+  size_t GetRefCount() const { return 0u; }
+  void CheckNoRefs() const { }
+};
+
+template <>
+class DebugStackReferenceImpl<false> {
+ public:
+  explicit DebugStackReferenceImpl(DebugStackRefCounterImpl<false>* counter) { UNUSED(counter); }
+  DebugStackReferenceImpl(const DebugStackReferenceImpl& other) = default;
+  DebugStackReferenceImpl& operator=(const DebugStackReferenceImpl& other) = default;
+  void CheckTop() { }
+};
+
+template <>
+class DebugStackIndirectTopRefImpl<false> {
+ public:
+  explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<false>* ref) { UNUSED(ref); }
+  DebugStackIndirectTopRefImpl(const DebugStackIndirectTopRefImpl& other) = default;
+  DebugStackIndirectTopRefImpl& operator=(const DebugStackIndirectTopRefImpl& other) = default;
+  void CheckTop() { }
+};
+
+// Debug mode versions.
+
+template <bool kIsDebug>
+class DebugStackRefCounterImpl {
+ public:
+  DebugStackRefCounterImpl() : ref_count_(0u) { }
+  ~DebugStackRefCounterImpl() { CheckNoRefs(); }
+  size_t IncrementRefCount() { return ++ref_count_; }
+  void DecrementRefCount() { --ref_count_; }
+  size_t GetRefCount() const { return ref_count_; }
+  void CheckNoRefs() const { CHECK_EQ(ref_count_, 0u); }
+
+ private:
+  size_t ref_count_;
+};
+
+template <bool kIsDebug>
+class DebugStackReferenceImpl {
+ public:
+  explicit DebugStackReferenceImpl(DebugStackRefCounterImpl<kIsDebug>* counter)
+    : counter_(counter), ref_count_(counter->IncrementRefCount()) {
+  }
+  DebugStackReferenceImpl(const DebugStackReferenceImpl& other)
+    : counter_(other.counter_), ref_count_(counter_->IncrementRefCount()) {
+  }
+  DebugStackReferenceImpl& operator=(const DebugStackReferenceImpl& other) {
+    CHECK(counter_ == other.counter_);
+    return *this;
+  }
+  ~DebugStackReferenceImpl() { counter_->DecrementRefCount(); }
+  void CheckTop() { CHECK_EQ(counter_->GetRefCount(), ref_count_); }
+
+ private:
+  DebugStackRefCounterImpl<true>* counter_;
+  size_t ref_count_;
+};
+
+template <bool kIsDebug>
+class DebugStackIndirectTopRefImpl {
+ public:
+  explicit DebugStackIndirectTopRefImpl(DebugStackReferenceImpl<kIsDebug>* ref)
+      : ref_(ref) {
+    CheckTop();
+  }
+  DebugStackIndirectTopRefImpl(const DebugStackIndirectTopRefImpl& other)
+      : ref_(other.ref_) {
+    CheckTop();
+  }
+  DebugStackIndirectTopRefImpl& operator=(const DebugStackIndirectTopRefImpl& other) {
+    CHECK(ref_ == other.ref_);
+    CheckTop();
+    return *this;
+  }
+  ~DebugStackIndirectTopRefImpl() {
+    CheckTop();
+  }
+  void CheckTop() {
+    ref_->CheckTop();
+  }
+
+ private:
+  DebugStackReferenceImpl<kIsDebug>* ref_;
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_DEBUG_STACK_H_
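
In debug builds these counters enforce stack-like construction and destruction order; in release builds the <false> specializations above are empty and should compile away. A sketch:

    void DebugStackExample() {
      art::DebugStackRefCounter counter;
      art::DebugStackReference ref1(&counter);
      {
        art::DebugStackReference ref2(&counter);
        ref2.CheckTop();  // OK: ref2 holds the highest reference count.
        // ref1.CheckTop() here would CHECK-fail in debug builds.
      }
      ref1.CheckTop();  // OK again once ref2 has been destroyed.
    }  // The counter's destructor runs CheckNoRefs() after ref1 is gone.
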
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
new file mode 100644
index 0000000..4a7be38
--- /dev/null
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -0,0 +1,146 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "scoped_arena_allocator.h"
+
+#include "arena_allocator.h"
+#include <memcheck/memcheck.h>
+
+namespace art {
+
+static constexpr size_t kValgrindRedZoneBytes = 8;
+
+ArenaStack::ArenaStack(ArenaPool* arena_pool)
+  : DebugStackRefCounter(),
+    stats_and_pool_(arena_pool),
+    bottom_arena_(nullptr),
+    top_arena_(nullptr),
+    top_ptr_(nullptr),
+    top_end_(nullptr),
+    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
+}
+
+ArenaStack::~ArenaStack() {
+  DebugStackRefCounter::CheckNoRefs();
+  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
+}
+
+void ArenaStack::Reset() {
+  DebugStackRefCounter::CheckNoRefs();
+  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
+  bottom_arena_ = nullptr;
+  top_arena_ = nullptr;
+  top_ptr_ = nullptr;
+  top_end_ = nullptr;
+}
+
+MemStats ArenaStack::GetPeakStats() const {
+  DebugStackRefCounter::CheckNoRefs();
+  return MemStats("ArenaStack peak", static_cast<const TaggedStats<Peak>*>(&stats_and_pool_),
+                  bottom_arena_);
+}
+
+uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
+  UpdateBytesAllocated();
+  size_t allocation_size = std::max(Arena::kDefaultSize, rounded_bytes);
+  if (UNLIKELY(top_arena_ == nullptr)) {
+    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
+    top_arena_->next_ = nullptr;
+  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
+    top_arena_ = top_arena_->next_;
+  } else {
+    Arena* tail = top_arena_->next_;
+    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
+    top_arena_ = top_arena_->next_;
+    top_arena_->next_ = tail;
+  }
+  top_end_ = top_arena_->End();
+  // top_ptr_ shall be updated by ScopedArenaAllocator.
+  return top_arena_->Begin();
+}
+
+void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
+  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
+    PeakStats()->Copy(*CurrentStats());
+  }
+  CurrentStats()->Copy(restore_stats);
+}
+
+void ArenaStack::UpdateBytesAllocated() {
+  if (top_arena_ != nullptr) {
+    // Update how many bytes we have allocated into the arena so that the arena pool knows how
+    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
+    // zero-initialized, the Arena may be reused by ArenaAllocator, which does guarantee this.
+    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
+    if (top_arena_->bytes_allocated_ < allocated) {
+      top_arena_->bytes_allocated_ = allocated;
+    }
+  }
+}
+
+void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
+  uint8_t* ptr = top_ptr_;
+  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
+    ptr = AllocateFromNextArena(rounded_bytes);
+    CHECK(ptr != nullptr) << "Failed to allocate memory";
+  }
+  CurrentStats()->RecordAlloc(bytes, kind);
+  top_ptr_ = ptr + rounded_bytes;
+  VALGRIND_MAKE_MEM_UNDEFINED(ptr, bytes);
+  VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
+  return ptr;
+}
+
+ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
+  : DebugStackReference(arena_stack),
+    DebugStackRefCounter(),
+    ArenaAllocatorStats(*arena_stack->CurrentStats()),
+    arena_stack_(arena_stack),
+    mark_arena_(arena_stack->top_arena_),
+    mark_ptr_(arena_stack->top_ptr_),
+    mark_end_(arena_stack->top_end_) {
+}
+
+ScopedArenaAllocator::~ScopedArenaAllocator() {
+  DoReset();
+}
+
+void ScopedArenaAllocator::Reset() {
+  DoReset();
+  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
+  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
+    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
+  }
+}
+
+void ScopedArenaAllocator::DoReset() {
+  DebugStackReference::CheckTop();
+  DebugStackRefCounter::CheckNoRefs();
+  arena_stack_->UpdatePeakStatsAndRestore(*this);
+  arena_stack_->UpdateBytesAllocated();
+  if (LIKELY(mark_arena_ != nullptr)) {
+    arena_stack_->top_arena_ = mark_arena_;
+    arena_stack_->top_ptr_ = mark_ptr_;
+    arena_stack_->top_end_ = mark_end_;
+  } else if (arena_stack_->bottom_arena_ != nullptr) {
+    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
+    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
+    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
+  }
+}
+
+}  // namespace art
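
Nesting is the point of the stack: each ScopedArenaAllocator records a mark on construction, and DoReset() above rewinds the stack to that mark. A sketch with hypothetical names:

    void NestedScopes(art::ArenaPool* pool) {
      art::ArenaStack arena_stack(pool);
      art::ScopedArenaAllocator outer(&arena_stack);
      void* outer_data = outer.Alloc(128);
      {
        art::ScopedArenaAllocator inner(&arena_stack);
        inner.Alloc(256);
      }  // inner rewinds top_ptr_ to its mark; outer_data survives.
      UNUSED(outer_data);
    }  // outer rewinds, then ~ArenaStack() returns all arenas to the pool.
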
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
new file mode 100644
index 0000000..bbedeac
--- /dev/null
+++ b/runtime/base/scoped_arena_allocator.h
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
+#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
+
+#include "arena_allocator.h"
+#include "debug_stack.h"
+#include "globals.h"
+#include "logging.h"
+#include "macros.h"
+
+namespace art {
+
+class ArenaStack;
+class ScopedArenaAllocator;
+
+template <typename T>
+class ScopedArenaAllocatorAdapter;
+
+// Holds a list of Arenas for use by ScopedArenaAllocator stack.
+class ArenaStack : private DebugStackRefCounter {
+ public:
+  explicit ArenaStack(ArenaPool* arena_pool);
+  ~ArenaStack();
+
+  void Reset();
+
+  size_t PeakBytesAllocated() {
+    return PeakStats()->BytesAllocated();
+  }
+
+  MemStats GetPeakStats() const;
+
+ private:
+  struct Peak;
+  struct Current;
+  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
+  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
+    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
+    ArenaPool* const pool;
+  };
+
+  ArenaAllocatorStats* PeakStats() {
+    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
+  }
+
+  ArenaAllocatorStats* CurrentStats() {
+    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
+  }
+
+  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
+  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
+    if (UNLIKELY(running_on_valgrind_)) {
+      return AllocValgrind(bytes, kind);
+    }
+    size_t rounded_bytes = RoundUp(bytes, 8);
+    uint8_t* ptr = top_ptr_;
+    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
+      ptr = AllocateFromNextArena(rounded_bytes);
+    }
+    CurrentStats()->RecordAlloc(bytes, kind);
+    top_ptr_ = ptr + rounded_bytes;
+    return ptr;
+  }
+
+  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
+  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
+  void UpdateBytesAllocated();
+  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+
+  StatsAndPool stats_and_pool_;
+  Arena* bottom_arena_;
+  Arena* top_arena_;
+  uint8_t* top_ptr_;
+  uint8_t* top_end_;
+
+  const bool running_on_valgrind_;
+
+  friend class ScopedArenaAllocator;
+  template <typename T>
+  friend class ScopedArenaAllocatorAdapter;
+
+  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
+};
+
+class ScopedArenaAllocator
+    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
+ public:
+  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
+  // the allocator is not exactly a C++ block scope. For example, an optimization
+  // pass can create the scoped allocator in Start() and destroy it in End().
+  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
+    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
+    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
+    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
+    return allocator;
+  }
+
+  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
+  ~ScopedArenaAllocator();
+
+  void Reset();
+
+  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
+    DebugStackReference::CheckTop();
+    return arena_stack_->Alloc(bytes, kind);
+  }
+
+  template <typename T>
+  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
+    return static_cast<T*>(Alloc(length * sizeof(T), kind));
+  }
+
+  // Get adapter for use in STL containers. See scoped_arena_containers.h.
+  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
+
+  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
+  static void operator delete(void* ptr) { UNUSED(ptr); }
+
+ private:
+  ArenaStack* const arena_stack_;
+  Arena* mark_arena_;
+  uint8_t* mark_ptr_;
+  uint8_t* mark_end_;
+
+  void DoReset();
+
+  template <typename T>
+  friend class ScopedArenaAllocatorAdapter;
+
+  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
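
A sketch of the Create() pattern described above, for a pass whose allocator does not fit a C++ block scope (PassDriver is hypothetical):

    class PassDriver {
     public:
      explicit PassDriver(art::ArenaStack* arena_stack) : arena_stack_(arena_stack) { }
      void Start() {
        allocator_ = art::ScopedArenaAllocator::Create(arena_stack_);
      }
      void End() {
        delete allocator_;  // Runs the destructor; operator delete above is a no-op.
        allocator_ = nullptr;
      }
     private:
      art::ArenaStack* const arena_stack_;
      art::ScopedArenaAllocator* allocator_ = nullptr;
    };
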
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
new file mode 100644
index 0000000..664a909
--- /dev/null
+++ b/runtime/base/scoped_arena_containers.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
+#define ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
+
+#include <deque>
+#include <queue>
+#include <set>
+#include <vector>
+
+#include "arena_containers.h"  // For ArenaAllocatorAdapterKind.
+#include "scoped_arena_allocator.h"
+#include "safe_map.h"
+
+namespace art {
+
+// Adapter for use of ScopedArenaAllocator in STL containers.
+// Use ScopedArenaAllocator::Adapter() to create an adapter to pass to container constructors.
+// For example,
+//   void foo(ScopedArenaAllocator* allocator) {
+//     ScopedArenaVector<int> foo_vector(allocator->Adapter(kArenaAllocMisc));
+//     ScopedArenaSafeMap<int, int> foo_map(std::less<int>(), allocator->Adapter());
+//     // Use foo_vector and foo_map...
+//   }
+template <typename T>
+class ScopedArenaAllocatorAdapter;
+
+template <typename T>
+using ScopedArenaDeque = std::deque<T, ScopedArenaAllocatorAdapter<T>>;
+
+template <typename T>
+using ScopedArenaQueue = std::queue<T, ScopedArenaDeque<T>>;
+
+template <typename T>
+using ScopedArenaVector = std::vector<T, ScopedArenaAllocatorAdapter<T>>;
+
+template <typename T, typename Comparator = std::less<T>>
+using ScopedArenaSet = std::set<T, Comparator, ScopedArenaAllocatorAdapter<T>>;
+
+template <typename K, typename V, typename Comparator = std::less<K>>
+using ScopedArenaSafeMap =
+    SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
+
+// Implementation details below.
+
+template <>
+class ScopedArenaAllocatorAdapter<void>
+    : private DebugStackReference, private DebugStackIndirectTopRef,
+      private ArenaAllocatorAdapterKind {
+ public:
+  typedef void value_type;
+  typedef void* pointer;
+  typedef const void* const_pointer;
+
+  template <typename U>
+  struct rebind {
+    typedef ScopedArenaAllocatorAdapter<U> other;
+  };
+
+  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator,
+                                       ArenaAllocKind kind = kArenaAllocSTL)
+      : DebugStackReference(arena_allocator),
+        DebugStackIndirectTopRef(arena_allocator),
+        ArenaAllocatorAdapterKind(kind),
+        arena_stack_(arena_allocator->arena_stack_) {
+  }
+  template <typename U>
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
+      : DebugStackReference(other),
+        DebugStackIndirectTopRef(other),
+        ArenaAllocatorAdapterKind(other),
+        arena_stack_(other.arena_stack_) {
+  }
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
+  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
+  ~ScopedArenaAllocatorAdapter() = default;
+
+ private:
+  ArenaStack* arena_stack_;
+
+  template <typename U>
+  friend class ScopedArenaAllocatorAdapter;
+};
+
+template <typename T>
+class ScopedArenaAllocatorAdapter
+    : private DebugStackReference, private DebugStackIndirectTopRef,
+      private ArenaAllocatorAdapterKind {
+ public:
+  typedef T value_type;
+  typedef T* pointer;
+  typedef T& reference;
+  typedef const T* const_pointer;
+  typedef const T& const_reference;
+  typedef size_t size_type;
+  typedef ptrdiff_t difference_type;
+
+  template <typename U>
+  struct rebind {
+    typedef ScopedArenaAllocatorAdapter<U> other;
+  };
+
+  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator,
+                                       ArenaAllocKind kind = kArenaAllocSTL)
+      : DebugStackReference(arena_allocator),
+        DebugStackIndirectTopRef(arena_allocator),
+        ArenaAllocatorAdapterKind(kind),
+        arena_stack_(arena_allocator->arena_stack_) {
+  }
+  template <typename U>
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
+      : DebugStackReference(other),
+        DebugStackIndirectTopRef(other),
+        ArenaAllocatorAdapterKind(other),
+        arena_stack_(other.arena_stack_) {
+  }
+  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
+  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
+  ~ScopedArenaAllocatorAdapter() = default;
+
+  size_type max_size() const {
+    return static_cast<size_type>(-1) / sizeof(T);
+  }
+
+  pointer address(reference x) const { return &x; }
+  const_pointer address(const_reference x) const { return &x; }
+
+  pointer allocate(size_type n, ScopedArenaAllocatorAdapter<void>::pointer hint = nullptr) {
+    UNUSED(hint);
+    DCHECK_LE(n, max_size());
+    DebugStackIndirectTopRef::CheckTop();
+    return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T),
+                                                    ArenaAllocatorAdapterKind::Kind()));
+  }
+  void deallocate(pointer p, size_type n) {
+    UNUSED(p);
+    UNUSED(n);
+    DebugStackIndirectTopRef::CheckTop();
+  }
+
+  void construct(pointer p, const_reference val) {
+    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
+    new (static_cast<void*>(p)) value_type(val);
+  }
+  void destroy(pointer p) {
+    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
+    p->~value_type();
+  }
+
+ private:
+  ArenaStack* arena_stack_;
+
+  template <typename U>
+  friend class ScopedArenaAllocatorAdapter;
+
+  template <typename U>
+  friend bool operator==(const ScopedArenaAllocatorAdapter<U>& lhs,
+                         const ScopedArenaAllocatorAdapter<U>& rhs);
+};
+
+template <typename T>
+inline bool operator==(const ScopedArenaAllocatorAdapter<T>& lhs,
+                       const ScopedArenaAllocatorAdapter<T>& rhs) {
+  return lhs.arena_stack_ == rhs.arena_stack_;
+}
+
+template <typename T>
+inline bool operator!=(const ScopedArenaAllocatorAdapter<T>& lhs,
+                       const ScopedArenaAllocatorAdapter<T>& rhs) {
+  return !(lhs == rhs);
+}
+
+inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter(ArenaAllocKind kind) {
+  return ScopedArenaAllocatorAdapter<void>(this, kind);
+}
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
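
A sketch matching the header comment above (function and variable names illustrative):

    void ComputeLiveness(art::ArenaStack* arena_stack) {
      art::ScopedArenaAllocator allocator(arena_stack);
      art::ScopedArenaVector<uint32_t> worklist(allocator.Adapter(art::kArenaAllocMisc));
      art::ScopedArenaSet<uint32_t> visited(std::less<uint32_t>(), allocator.Adapter());
      worklist.push_back(0u);
      visited.insert(0u);
      // All of this memory is rewound when 'allocator' goes out of scope.
    }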