Give the JIT its own arena pool to avoid lock contention.
Sharing it with the verifier and the class loader is not ideal,
especially at startup time.
bug:27398183
bug:23128949
Change-Id: I1b91663a13f6c5b33ad3b4be780d93eb7fe445b4
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index a4b38ea..44af3f7 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -183,10 +183,10 @@
free(reinterpret_cast<void*>(memory_));
}
-MemMapArena::MemMapArena(size_t size, bool low_4gb) {
+MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
std::string error_msg;
map_.reset(MemMap::MapAnonymous(
- "LinearAlloc", nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
+ name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
CHECK(map_.get() != nullptr) << error_msg;
memory_ = map_->Begin();
size_ = map_->Size();
@@ -210,9 +210,12 @@
}
}
-ArenaPool::ArenaPool(bool use_malloc, bool low_4gb)
- : use_malloc_(use_malloc), lock_("Arena pool lock", kArenaPoolLock), free_arenas_(nullptr),
- low_4gb_(low_4gb) {
+ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
+ : use_malloc_(use_malloc),
+ lock_("Arena pool lock", kArenaPoolLock),
+ free_arenas_(nullptr),
+ low_4gb_(low_4gb),
+ name_(name) {
if (low_4gb) {
CHECK(!use_malloc) << "low4gb must use map implementation";
}
@@ -250,7 +253,7 @@
}
if (ret == nullptr) {
ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
- new MemMapArena(size, low_4gb_);
+ new MemMapArena(size, low_4gb_, name_);
}
ret->Reset();
return ret;