Merge "Remove dex_pc's default value from top level HInstruction"
diff --git a/build/Android.oat.mk b/build/Android.oat.mk
index 3272c27..592843e 100644
--- a/build/Android.oat.mk
+++ b/build/Android.oat.mk
@@ -57,7 +57,7 @@
endif
ifeq ($(1),optimizing)
core_compile_options += --compiler-backend=Optimizing
- core_dex2oat_dependency += $(DEX2OAT)
+ core_dex2oat_dependency := $(DEX2OAT)
core_infix := -optimizing
endif
ifeq ($(1),interpreter)
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 5177b9a..12d6b03 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -562,6 +562,9 @@
return linker_patches;
}
+// TODO: The function below uses too much stack space. Bug: 24698147
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wframe-larger-than="
CompiledMethod* OptimizingCompiler::CompileOptimized(HGraph* graph,
CodeGenerator* codegen,
CompilerDriver* compiler_driver,
@@ -611,6 +614,7 @@
soa.Self()->TransitionFromSuspendedToRunnable();
return compiled_method;
}
+#pragma GCC diagnostic pop
CompiledMethod* OptimizingCompiler::CompileBaseline(
CodeGenerator* codegen,
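
As an aside, the push/ignored/pop pattern added above scopes the suppression to the one oversized function instead of disabling -Wframe-larger-than= for the whole translation unit. A minimal sketch of the same pattern, with a hypothetical function name:

    #pragma GCC diagnostic push
    #pragma GCC diagnostic ignored "-Wframe-larger-than="
    // Hypothetical example: locals large enough to exceed the frame limit.
    void StackHeavyFunction() {
      char scratch[64 * 1024];  // would normally trigger -Wframe-larger-than=
      scratch[0] = 0;           // touch the buffer so it is not elided
    }
    #pragma GCC diagnostic pop
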
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index 345428c..6f2aa46 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -23,7 +23,6 @@
#include "mem_map.h"
#include "mutex.h"
#include "thread-inl.h"
-#include "base/memory_tool.h"
namespace art {
@@ -290,8 +289,7 @@
begin_(nullptr),
end_(nullptr),
ptr_(nullptr),
- arena_head_(nullptr),
- is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL) {
+ arena_head_(nullptr) {
}
void ArenaAllocator::UpdateBytesAllocated() {
@@ -302,14 +300,13 @@
}
}
-void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
// Obtain a new block.
ObtainNewArenaForAllocation(rounded_bytes);
- if (UNLIKELY(ptr_ == nullptr)) {
- return nullptr;
- }
+ CHECK(ptr_ != nullptr);
+ MEMORY_TOOL_MAKE_UNDEFINED(ptr_, end_ - ptr_);
}
ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
uint8_t* ret = ptr_;
@@ -318,6 +315,7 @@
for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
CHECK_EQ(*ptr, 0U);
}
+ MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
MEMORY_TOOL_MAKE_NOACCESS(ret + bytes, rounded_bytes - bytes);
return ret;
}
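
The red-zone logic above rounds each request up to bytes + kMemoryToolRedZoneBytes, hands the caller the leading bytes as defined memory, and marks the tail inaccessible so overflows trip the tool. A self-contained sketch of the same scheme under ASan (assumes -fsanitize=address; kRedZoneBytes and AllocWithRedZone are illustrative stand-ins, not ART code):

    #include <sanitizer/asan_interface.h>
    #include <cstddef>
    #include <cstdint>

    constexpr size_t kRedZoneBytes = 8;  // stand-in for kMemoryToolRedZoneBytes

    // Each allocation: [ usable bytes (unpoisoned) | red zone (poisoned) ]
    uint8_t* AllocWithRedZone(uint8_t*& ptr, size_t bytes) {
      size_t rounded = (bytes + kRedZoneBytes + 7u) & ~size_t(7);  // RoundUp(., 8)
      uint8_t* ret = ptr;
      ptr += rounded;
      __asan_unpoison_memory_region(ret, bytes);                  // MAKE_DEFINED
      __asan_poison_memory_region(ret + bytes, rounded - bytes);  // MAKE_NOACCESS
      return ret;  // reading ret[bytes] now produces an ASan report
    }
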
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index b4f19ee..565b416 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -21,6 +21,7 @@
#include <stddef.h>
#include "base/bit_utils.h"
+#include "base/memory_tool.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"
@@ -149,6 +150,52 @@
typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
+template <bool kAvailable, bool kValgrind>
+class ArenaAllocatorMemoryToolCheckImpl {
+ // This is the generic template; because there is a partial specialization
+ // for kValgrind == false, it can be instantiated only for kValgrind == true.
+ static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
+ static_assert(kAvailable, "Valgrind implies memory tool availability.");
+
+ public:
+ ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
+ bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
+
+ private:
+ const bool is_running_on_valgrind_;
+};
+
+template <bool kAvailable>
+class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
+ public:
+ ArenaAllocatorMemoryToolCheckImpl() { }
+ bool IsRunningOnMemoryTool() { return kAvailable; }
+};
+
+typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
+ ArenaAllocatorMemoryToolCheck;
+
+class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
+ public:
+ using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;
+
+ void MakeDefined(void* ptr, size_t size) {
+ if (IsRunningOnMemoryTool()) {
+ MEMORY_TOOL_MAKE_DEFINED(ptr, size);
+ }
+ }
+ void MakeUndefined(void* ptr, size_t size) {
+ if (IsRunningOnMemoryTool()) {
+ MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
+ }
+ }
+ void MakeInaccessible(void* ptr, size_t size) {
+ if (IsRunningOnMemoryTool()) {
+ MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
+ }
+ }
+};
+
class Arena {
public:
static constexpr size_t kDefaultSize = 128 * KB;
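
The point of the partial specialization introduced above: in non-Valgrind builds, IsRunningOnMemoryTool() returns a compile-time constant, so the memory-tool branch in Alloc() becomes dead code the compiler can delete. A stripped-down sketch of the same trick (names are illustrative):

    template <bool kAvailable, bool kValgrind>
    struct CheckImpl {  // generic case: Valgrind must be probed at runtime
      bool on_tool_ = true;
      bool IsRunningOnMemoryTool() const { return on_tool_; }
    };

    template <bool kAvailable>
    struct CheckImpl<kAvailable, false> {  // ASan or no tool: known statically
      bool IsRunningOnMemoryTool() const { return kAvailable; }
    };

    void* Alloc(CheckImpl<false, false>& c) {
      if (c.IsRunningOnMemoryTool()) {  // folds to if (false): branch removed
        return nullptr;                 // slow-path placeholder
      }
      return nullptr;                   // fast-path placeholder
    }
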
@@ -233,18 +280,24 @@
DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
-class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
+class ArenaAllocator
+ : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
public:
explicit ArenaAllocator(ArenaPool* pool);
~ArenaAllocator();
+ using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
+ using ArenaAllocatorMemoryTool::MakeDefined;
+ using ArenaAllocatorMemoryTool::MakeUndefined;
+ using ArenaAllocatorMemoryTool::MakeInaccessible;
+
// Get adapter for use in STL containers. See arena_containers.h .
ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
// Returns zeroed memory.
void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
- if (UNLIKELY(is_running_on_memory_tool_)) {
- return AllocValgrind(bytes, kind);
+ if (UNLIKELY(IsRunningOnMemoryTool())) {
+ return AllocWithMemoryTool(bytes, kind);
}
bytes = RoundUp(bytes, kAlignment);
if (UNLIKELY(ptr_ + bytes > end_)) {
@@ -268,6 +321,7 @@
auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
// If we haven't allocated anything else, we can safely extend.
if (end == ptr_) {
+ DCHECK(!IsRunningOnMemoryTool()); // Red zone prevents end == ptr_.
const size_t size_delta = new_size - ptr_size;
// Check remaining space.
const size_t remain = end_ - ptr_;
@@ -288,7 +342,7 @@
return static_cast<T*>(Alloc(length * sizeof(T), kind));
}
- void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+ void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
void ObtainNewArenaForAllocation(size_t allocation_size);
@@ -316,7 +370,6 @@
uint8_t* end_;
uint8_t* ptr_;
Arena* arena_head_;
- bool is_running_on_memory_tool_;
template <typename U>
friend class ArenaAllocatorAdapter;
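
The using-declarations added to ArenaAllocator follow the standard private-inheritance mixin idiom: the base class is an implementation detail, and only selected members are re-exported. In isolation (Mixin and Widget are made-up names):

    class Mixin {
     public:
      void Helper() {}
    };

    class Widget : private Mixin {  // implementation detail, not an is-a
     public:
      using Mixin::Helper;          // re-export just this member
    };

    void Use() {
      Widget w;
      w.Helper();        // OK: made public again by the using-declaration
      // Mixin* m = &w;  // error: the private base is inaccessible here
    }
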
diff --git a/runtime/base/arena_containers.h b/runtime/base/arena_containers.h
index 810c1c4..e7ea09d 100644
--- a/runtime/base/arena_containers.h
+++ b/runtime/base/arena_containers.h
@@ -161,7 +161,7 @@
return arena_allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind());
}
void deallocate(pointer p, size_type n) {
- UNUSED(p, n);
+ arena_allocator_->MakeInaccessible(p, sizeof(T) * n);
}
void construct(pointer p, const_reference val) {
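
Turning deallocate() from a no-op into MakeInaccessible() means arena memory freed through STL containers is poisoned, so stale pointers into it get reported rather than silently reading recycled bytes. A sketch of the idea with a plain STL-style allocator (PoisoningAllocator is hypothetical; ASan build assumed):

    #include <sanitizer/asan_interface.h>
    #include <cstddef>
    #include <new>

    template <typename T>
    struct PoisoningAllocator {
      using value_type = T;
      T* allocate(std::size_t n) {
        void* p = ::operator new(n * sizeof(T));
        __asan_unpoison_memory_region(p, n * sizeof(T));  // fresh and usable
        return static_cast<T*>(p);
      }
      void deallocate(T* p, std::size_t n) {
        // Like arena memory, we poison instead of releasing (this sketch
        // leaks by design; arenas reclaim in bulk), so a dangling pointer
        // dereference is flagged by the tool instead of reading stale data.
        __asan_poison_memory_region(p, n * sizeof(T));
      }
    };
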
diff --git a/runtime/base/memory_tool.h b/runtime/base/memory_tool.h
index e0bdcfe..e1a2e07 100644
--- a/runtime/base/memory_tool.h
+++ b/runtime/base/memory_tool.h
@@ -32,10 +32,12 @@
#define MEMORY_TOOL_MAKE_NOACCESS(p, s) __asan_poison_memory_region(p, s)
#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) __asan_unpoison_memory_region(p, s)
#define MEMORY_TOOL_MAKE_DEFINED(p, s) __asan_unpoison_memory_region(p, s)
+constexpr bool kMemoryToolIsAvailable = true;
#else
#define MEMORY_TOOL_MAKE_NOACCESS(p, s) do { (void)(p); (void)(s); } while (0)
#define MEMORY_TOOL_MAKE_UNDEFINED(p, s) do { (void)(p); (void)(s); } while (0)
#define MEMORY_TOOL_MAKE_DEFINED(p, s) do { (void)(p); (void)(s); } while (0)
+constexpr bool kMemoryToolIsAvailable = false;
#endif
#define ATTRIBUTE_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
@@ -54,6 +56,7 @@
#define MEMORY_TOOL_MAKE_DEFINED(p, s) VALGRIND_MAKE_MEM_DEFINED(p, s)
#define ATTRIBUTE_NO_SANITIZE_ADDRESS
#define RUNNING_ON_MEMORY_TOOL RUNNING_ON_VALGRIND
+constexpr bool kMemoryToolIsAvailable = true;
constexpr bool kMemoryToolIsValgrind = true;
constexpr bool kMemoryToolDetectsLeaks = true;
constexpr bool kMemoryToolAddsRedzones = true;
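
A small aside on the no-tool branch above: the no-op macros expand to do { ... } while (0) rather than to nothing, the usual idiom for making a function-like macro behave as exactly one statement. Illustration (NOOP is a made-up name):

    #include <cstddef>

    #define NOOP(p, s) do { (void)(p); (void)(s); } while (0)

    void f(bool poison, void* p, std::size_t s) {
      if (poison)
        NOOP(p, s);  // expands to one statement, so the else still binds
      else
        (void)s;
      // With a bare two-statement expansion, this if/else would not compile.
    }
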
diff --git a/runtime/base/scoped_arena_allocator.cc b/runtime/base/scoped_arena_allocator.cc
index d823edd..31f96e4 100644
--- a/runtime/base/scoped_arena_allocator.cc
+++ b/runtime/base/scoped_arena_allocator.cc
@@ -29,8 +29,7 @@
bottom_arena_(nullptr),
top_arena_(nullptr),
top_ptr_(nullptr),
- top_end_(nullptr),
- is_running_on_memory_tool_(RUNNING_ON_MEMORY_TOOL > 0) {
+ top_end_(nullptr) {
}
ArenaStack::~ArenaStack() {
@@ -91,7 +90,7 @@
}
}
-void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
+void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
uint8_t* ptr = top_ptr_;
if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
index ca514e4..4f51370 100644
--- a/runtime/base/scoped_arena_allocator.h
+++ b/runtime/base/scoped_arena_allocator.h
@@ -32,11 +32,16 @@
class ScopedArenaAllocatorAdapter;
// Holds a list of Arenas for use by ScopedArenaAllocator stack.
-class ArenaStack : private DebugStackRefCounter {
+class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
public:
explicit ArenaStack(ArenaPool* arena_pool);
~ArenaStack();
+ using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
+ using ArenaAllocatorMemoryTool::MakeDefined;
+ using ArenaAllocatorMemoryTool::MakeUndefined;
+ using ArenaAllocatorMemoryTool::MakeInaccessible;
+
void Reset();
size_t PeakBytesAllocated() {
@@ -64,8 +69,8 @@
// Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
- if (UNLIKELY(is_running_on_memory_tool_)) {
- return AllocValgrind(bytes, kind);
+ if (UNLIKELY(IsRunningOnMemoryTool())) {
+ return AllocWithMemoryTool(bytes, kind);
}
size_t rounded_bytes = RoundUp(bytes, 8);
uint8_t* ptr = top_ptr_;
@@ -80,7 +85,7 @@
uint8_t* AllocateFromNextArena(size_t rounded_bytes);
void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
void UpdateBytesAllocated();
- void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
+ void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
StatsAndPool stats_and_pool_;
Arena* bottom_arena_;
@@ -88,8 +93,6 @@
uint8_t* top_ptr_;
uint8_t* top_end_;
- const bool is_running_on_memory_tool_;
-
friend class ScopedArenaAllocator;
template <typename T>
friend class ScopedArenaAllocatorAdapter;
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index 82db60e..eecc55f 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -153,9 +153,8 @@
ArenaAllocatorAdapterKind::Kind()));
}
void deallocate(pointer p, size_type n) {
- UNUSED(p);
- UNUSED(n);
DebugStackIndirectTopRef::CheckTop();
+ arena_stack_->MakeInaccessible(p, sizeof(T) * n);
}
void construct(pointer p, const_reference val) {
diff --git a/tools/art b/tools/art
index 676d6ae..304a9d0 100644
--- a/tools/art
+++ b/tools/art
@@ -89,6 +89,7 @@
invoke_with="perf record -o $ANDROID_DATA/perf.data -e cycles:u $invoke_with"
fi
+# We use the PIC core image to work with perf.
ANDROID_DATA=$ANDROID_DATA \
ANDROID_ROOT=$ANDROID_ROOT \
LD_LIBRARY_PATH=$LD_LIBRARY_PATH \
@@ -97,7 +98,7 @@
$invoke_with $ANDROID_ROOT/bin/$DALVIKVM $lib \
-XXlib:$LIBART \
-Xnorelocate \
- -Ximage:$ANDROID_ROOT/framework/core.art \
+ -Ximage:$ANDROID_ROOT/framework/core-optimizing-pic.art \
-Xcompiler-option --generate-debug-info \
"$@"