Add valgrind support to compiler arena allocator.
Change-Id: Id9974301d3810bfac968ee562b01a11098e402c2
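In outline: when the process is running under Valgrind, every allocation is
padded with an 8-byte red zone that is marked NOACCESS, so Memcheck reports
any access past the requested size, and an arena returned to the pool is
marked UNDEFINED, so stale data read out of it is flagged. A minimal sketch
of the red-zone half, assuming the stock <valgrind/memcheck.h> install path
(the patch itself uses Android's <memcheck/memcheck.h> layout) and an
illustrative AllocWithRedZone helper that is not part of the patch:

    #include <cstddef>
    #include <cstdint>
    #include <valgrind/memcheck.h>

    static constexpr size_t kRedZoneBytes = 8;

    // Bump-pointer allocation with a trailing red zone. Assumes the caller
    // has already made sure [ptr, end) has room for the rounded size.
    uint8_t* AllocWithRedZone(uint8_t*& ptr, size_t bytes) {
      // Same arithmetic as the patch: round up to 4 bytes, then append the
      // red zone (kRedZoneBytes is itself a multiple of 4).
      size_t rounded = ((bytes + 3) & ~static_cast<size_t>(3)) + kRedZoneBytes;
      uint8_t* ret = ptr;
      ptr += rounded;
      // Memcheck now reports any read or write of the padding past 'bytes'.
      VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded - bytes);
      return ret;
    }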
diff --git a/compiler/dex/arena_allocator.cc b/compiler/dex/arena_allocator.cc
index 5a91d27..2da8064 100644
--- a/compiler/dex/arena_allocator.cc
+++ b/compiler/dex/arena_allocator.cc
@@ -20,12 +20,14 @@
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
+#include <memcheck/memcheck.h>
namespace art {
// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
+static constexpr size_t kValgrindRedZoneBytes = 8;
static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
"Misc ",
@@ -108,6 +110,9 @@
void ArenaPool::FreeArena(Arena* arena) {
Thread* self = Thread::Current();
+ if (UNLIKELY(RUNNING_ON_VALGRIND)) {
+ VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);  // Reads of the recycled memory now show up as uses of uninitialised values.
+ }
{
MutexLock lock(self, lock_);
arena->next_ = free_arenas_;
@@ -129,7 +134,8 @@
end_(nullptr),
ptr_(nullptr),
arena_head_(nullptr),
- num_allocations_(0) {
+ num_allocations_(0),
+ running_on_valgrind_(RUNNING_ON_VALGRIND) {
memset(&alloc_stats_[0], 0, sizeof(alloc_stats_));
}
@@ -141,6 +147,29 @@
}
}
+void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+ size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;  // Round up to 4 bytes and add the red zone.
+ if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
+ // Obtain a new block.
+ ObtainNewArenaForAllocation(rounded_bytes);
+ if (UNLIKELY(ptr_ == nullptr)) {
+ return nullptr;
+ }
+ }
+ if (kCountAllocations) {
+ alloc_stats_[kind] += rounded_bytes;
+ ++num_allocations_;
+ }
+ uint8_t* ret = ptr_;
+ ptr_ += rounded_bytes;
+ // Check that the memory is already zeroed out.
+ for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
+ CHECK_EQ(*ptr, 0U);
+ }
+ VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
+ return ret;
+}
+
ArenaAllocator::~ArenaAllocator() {
// Reclaim all the arenas by giving them back to the thread pool.
UpdateBytesAllocated();
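For illustration only (none of the following is in the patch), the class of
bug the red zone is meant to surface looks like this; the kAllocMisc kind is
assumed here from the "Misc" entry in alloc_names:

    // Hypothetical caller bug: four ints are requested but a fifth is
    // written. Natively this silently lands in the next arena object;
    // under Valgrind the write hits the NOACCESS red zone and Memcheck
    // reports an invalid write at the faulting line.
    int32_t* regs = static_cast<int32_t*>(
        allocator->Alloc(4 * sizeof(int32_t), ArenaAllocator::kAllocMisc));
    regs[4] = 0;  // One element past the requested size.

Likewise, because FreeArena marks a recycled arena UNDEFINED, stale data read
through a pointer that outlives its ArenaAllocator is flagged by Memcheck as
uninitialised once it influences a branch or system call, which is roughly
the arena equivalent of use-after-free detection.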
diff --git a/compiler/dex/arena_allocator.h b/compiler/dex/arena_allocator.h
index dda52a2..d11d67c 100644
--- a/compiler/dex/arena_allocator.h
+++ b/compiler/dex/arena_allocator.h
@@ -103,6 +103,9 @@
// Returns zeroed memory.
void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
+ if (UNLIKELY(running_on_valgrind_)) {
+ return AllocValgrind(bytes, kind);
+ }
bytes = (bytes + 3) & ~3;
if (UNLIKELY(ptr_ + bytes > end_)) {
// Obtain a new block.
@@ -120,6 +123,7 @@
return ret;
}
+ void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
void ObtainNewArenaForAllocation(size_t allocation_size);
size_t BytesAllocated() const;
void DumpMemStats(std::ostream& os) const;
@@ -132,10 +136,9 @@
uint8_t* end_;
uint8_t* ptr_;
Arena* arena_head_;
-
- // Statistics.
size_t num_allocations_;
- size_t alloc_stats_[kNumAllocKinds]; // Bytes used by various allocation kinds.
+ size_t alloc_stats_[kNumAllocKinds]; // Bytes used by various allocation kinds.
+ bool running_on_valgrind_;
DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
}; // ArenaAllocator
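A note on cost: RUNNING_ON_VALGRIND is a client request that evaluates to
zero when the program runs natively, so caching it once in the constructor
keeps the Alloc() fast path to a single well-predicted branch on a plain
bool instead of re-issuing the request on every allocation. A sketch of the
check itself (the IsRunningOnValgrind wrapper is illustrative, not part of
ART):

    #include <valgrind/memcheck.h>  // Also pulls in valgrind.h, which defines RUNNING_ON_VALGRIND.

    // Zero natively, non-zero under Valgrind; safe to evaluate either way.
    bool IsRunningOnValgrind() {
      return RUNNING_ON_VALGRIND != 0;
    }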