Generalize Valgrind annotations in ART to support ASan.
Also add redzones around non-fixed mem_map(s).
Also extend the -Wframe-larger-than limit to enable the arm64 ASan build.
Change-Id: Ie572481a25fead59fc8978d2c317a33ac418516c
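
Background on the mechanism: the generalization routes all annotations
through a new base/memory_tool.h header that expands to either Valgrind
client requests or ASan's manual poisoning API, depending on the build.
A minimal sketch of such a header follows (the ADDRESS_SANITIZER selector
and the MAKE_DEFINED variant are assumptions for illustration, not
necessarily the exact ART header):

  // Sketch of a memory_tool.h-style abstraction, assuming the build
  // system defines ADDRESS_SANITIZER for ASan builds.
  #ifdef ADDRESS_SANITIZER

  #include <sanitizer/asan_interface.h>
  // Accesses to a poisoned region are reported as errors by ASan.
  #define MEMORY_TOOL_MAKE_NOACCESS(p, s) ASAN_POISON_MEMORY_REGION(p, s)
  // ASan tracks addressability only, not definedness, so "undefined"
  // and "defined" both map to unpoisoning.
  #define MEMORY_TOOL_MAKE_UNDEFINED(p, s) ASAN_UNPOISON_MEMORY_REGION(p, s)
  #define MEMORY_TOOL_MAKE_DEFINED(p, s) ASAN_UNPOISON_MEMORY_REGION(p, s)
  // ASan instruments the whole process, so the tool is always active.
  #define RUNNING_ON_MEMORY_TOOL 1U

  #else

  #include <memcheck/memcheck.h>
  #define MEMORY_TOOL_MAKE_NOACCESS(p, s) VALGRIND_MAKE_MEM_NOACCESS(p, s)
  #define MEMORY_TOOL_MAKE_UNDEFINED(p, s) VALGRIND_MAKE_MEM_UNDEFINED(p, s)
  #define MEMORY_TOOL_MAKE_DEFINED(p, s) VALGRIND_MAKE_MEM_DEFINED(p, s)
  #define RUNNING_ON_MEMORY_TOOL RUNNING_ON_VALGRIND

  #endif

With this in place, RUNNING_ON_MEMORY_TOOL > 0 (used in the constructor
below) stays a cheap runtime check under Valgrind and becomes a constant
true under ASan.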
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
index 4a7be38..d823edd 100644
--- a/runtime/base/scoped_arena_allocator.cc
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -17,11 +17,11 @@
#include "scoped_arena_allocator.h"
#include "arena_allocator.h"
-#include <memcheck/memcheck.h>
+#include "base/memory_tool.h"
namespace art {
-static constexpr size_t kValgrindRedZoneBytes = 8;
+static constexpr size_t kMemoryToolRedZoneBytes = 8;
ArenaStack::ArenaStack(ArenaPool* arena_pool)
: DebugStackRefCounter(),
@@ -30,7 +30,7 @@
     top_arena_(nullptr),
     top_ptr_(nullptr),
     top_end_(nullptr),
-    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
+    is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL > 0) {
 }
 
 ArenaStack::~ArenaStack() {
@@ -92,7 +92,7 @@
 }
 
 void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
-  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
+  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
   uint8_t* ptr = top_ptr_;
   if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
     ptr = AllocateFromNextArena(rounded_bytes);
@@ -100,8 +100,8 @@
   }
   CurrentStats()->RecordAlloc(bytes, kind);
   top_ptr_ = ptr + rounded_bytes;
-  VALGRIND_MAKE_MEM_UNDEFINED(ptr, bytes);
-  VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
+  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
+  MEMORY_TOOL_MAKE_NOACCESS(ptr + bytes, rounded_bytes - bytes);
   return ptr;
 }
 
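
Taken together, the hunks above keep the original redzone scheme working
under either tool: each allocation is padded by kMemoryToolRedZoneBytes
and rounded up to 8 bytes, the requested bytes are made undefined
(usable), and the padding is made no-access, so a read or write just past
the allocation lands in poisoned memory. The renamed
is_running_on_memory_tool_ flag is what steers allocations into this
instrumented path. A self-contained sketch of the same pattern under ASan
(hypothetical names, not ART code; build with -fsanitize=address):

  #include <sanitizer/asan_interface.h>
  #include <stddef.h>
  #include <stdint.h>

  constexpr size_t kRedZoneBytes = 8;

  // Hypothetical bump allocator mirroring AllocValgrind's scheme: pad the
  // request, unpoison only the requested bytes, and poison the pad.
  uint8_t* BumpAllocWithRedZone(uint8_t*& top, size_t bytes) {
    size_t rounded = (bytes + kRedZoneBytes + 7) & ~static_cast<size_t>(7);
    uint8_t* ptr = top;
    top += rounded;
    ASAN_UNPOISON_MEMORY_REGION(ptr, bytes);                  // usable bytes
    ASAN_POISON_MEMORY_REGION(ptr + bytes, rounded - bytes);  // redzone
    return ptr;
  }

  int main() {
    alignas(8) static uint8_t arena[1024];
    ASAN_POISON_MEMORY_REGION(arena, sizeof(arena));  // untouched space is off-limits too
    uint8_t* top = arena;
    uint8_t* p = BumpAllocWithRedZone(top, 10);
    p[9] = 1;   // last requested byte: fine
    p[10] = 1;  // first redzone byte: ASan reports the bad access here
    return 0;
  }

Because every allocation carries at least kRedZoneBytes of poisoned
padding, a small overflow cannot silently corrupt the next allocation; it
hits the redzone and is reported at the faulting instruction.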