Split the allocation path into 'instrumented' and 'uninstrumented'
variants.
The instrumented path is equivalent to the existing allocation path
that checks for three instrumentation mechanisms (the debugger
allocation tracking, the runtime allocation stats collection, and
valgrind) for every allocation. The uninstrumented path does not
perform these checks. We use the uninstrumented path by default and
enable the instrumented path only when any of the three mechanisms is
enabled. The uninstrumented version of Heap::AllocObject() is inlined.
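As a rough illustration of the split (a sketch only; the guard below
is a hypothetical stand-in for the real switching mechanism, which is
not part of this header, where AllocObject() simply forwards to the
instrumented variant):

    // Hypothetical selection between the two paths. Here,
    // AnyAllocationInstrumentationEnabled() stands in for whatever
    // check the runtime uses to detect that debugger allocation
    // tracking, allocation stats collection, or valgrind is active.
    mirror::Object* AllocObjectMaybeInstrumented(Thread* self, mirror::Class* klass,
                                                 size_t num_bytes) {
      if (UNLIKELY(AnyAllocationInstrumentationEnabled())) {
        // Slow path: performs the per-allocation instrumentation checks.
        return heap->AllocObjectInstrumented(self, klass, num_bytes);
      }
      // Default fast path: inlined, no instrumentation checks.
      return heap->AllocObjectUninstrumented(self, klass, num_bytes);
    }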
This change improves the Ritz MemAllocTest by ~4% on Nexus 4 and ~3%
on Host/x86.
Bug: 9986565
Change-Id: I3e68dfff6789d77bbdcea98457b694e1b5fcef5f
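The diff below also hoists the large object threshold into a
compile-time constant (kLargeObjectThreshold) and adds a
ShouldAllocLargeObject() helper. A plausible body for that helper,
assuming the primitive-array rule described in the new comment (the
body is a sketch, not part of this patch):

    bool Heap::ShouldAllocLargeObject(mirror::Class* c, size_t byte_count) {
      // Per the comment on kLargeObjectThreshold, only primitive arrays
      // above the threshold are routed to the large object space.
      return byte_count >= kLargeObjectThreshold && c->IsPrimitiveArray();
    }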
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 0ac3cf0..ffd3034 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -101,6 +101,11 @@
};
static constexpr HeapVerificationMode kDesiredHeapVerification = kNoHeapVerification;
+// If true, measure the total allocation time.
+static constexpr bool kMeasureAllocationTime = false;
+// Primitive arrays larger than this size are put in the large object space.
+static constexpr size_t kLargeObjectThreshold = 3 * kPageSize;
+
class Heap {
public:
static constexpr size_t kDefaultInitialSize = 2 * MB;
@@ -129,7 +134,17 @@
// Allocates and initializes storage for an object instance.
mirror::Object* AllocObject(Thread* self, mirror::Class* klass, size_t num_bytes)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ return AllocObjectInstrumented(self, klass, num_bytes);
+ }
+ mirror::Object* AllocObjectInstrumented(Thread* self, mirror::Class* klass, size_t num_bytes)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ mirror::Object* AllocObjectUninstrumented(Thread* self, mirror::Class* klass, size_t num_bytes)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+  void DebugCheckPreconditionsForAllocObject(mirror::Class* c, size_t byte_count)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ void ThrowOutOfMemoryError(size_t byte_count, bool large_object_allocation);
void RegisterNativeAllocation(int bytes)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -425,9 +440,24 @@
void AddModUnionTable(accounting::ModUnionTable* mod_union_table);
private:
+ bool TryAllocLargeObjectInstrumented(Thread* self, mirror::Class* c, size_t byte_count,
+ mirror::Object** obj_ptr, size_t* bytes_allocated)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ bool TryAllocLargeObjectUninstrumented(Thread* self, mirror::Class* c, size_t byte_count,
+ mirror::Object** obj_ptr, size_t* bytes_allocated)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ bool ShouldAllocLargeObject(mirror::Class* c, size_t byte_count);
+ void CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated, mirror::Object* obj);
+
// Allocates uninitialized storage. Passing in a null space tries to place the object in the
// large object space.
- template <class T> mirror::Object* Allocate(Thread* self, T* space, size_t num_bytes, size_t* bytes_allocated)
+ template <class T> mirror::Object* AllocateInstrumented(Thread* self, T* space, size_t num_bytes,
+ size_t* bytes_allocated)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ template <class T> mirror::Object* AllocateUninstrumented(Thread* self, T* space, size_t num_bytes,
+ size_t* bytes_allocated)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -439,17 +469,29 @@
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Try to allocate a number of bytes, this function never does any GCs.
- mirror::Object* TryToAllocate(Thread* self, space::AllocSpace* space, size_t alloc_size, bool grow,
- size_t* bytes_allocated)
+ mirror::Object* TryToAllocateInstrumented(Thread* self, space::AllocSpace* space, size_t alloc_size,
+ bool grow, size_t* bytes_allocated)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
// Try to allocate a number of bytes, this function never does any GCs. DlMallocSpace-specialized version.
- mirror::Object* TryToAllocate(Thread* self, space::DlMallocSpace* space, size_t alloc_size, bool grow,
- size_t* bytes_allocated)
+ mirror::Object* TryToAllocateInstrumented(Thread* self, space::DlMallocSpace* space, size_t alloc_size,
+ bool grow, size_t* bytes_allocated)
LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ mirror::Object* TryToAllocateUninstrumented(Thread* self, space::AllocSpace* space, size_t alloc_size,
+ bool grow, size_t* bytes_allocated)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ mirror::Object* TryToAllocateUninstrumented(Thread* self, space::DlMallocSpace* space, size_t alloc_size,
+ bool grow, size_t* bytes_allocated)
+ LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ void ThrowOutOfMemoryError(Thread* self, size_t byte_count, bool large_object_allocation)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
bool IsOutOfMemoryOnAllocation(size_t alloc_size, bool grow);
// Pushes a list of cleared references out to the managed heap.
@@ -459,7 +501,11 @@
void RequestConcurrentGC(Thread* self) LOCKS_EXCLUDED(Locks::runtime_shutdown_lock_);
bool IsGCRequestPending() const;
- void RecordAllocation(size_t size, mirror::Object* object)
+ size_t RecordAllocationInstrumented(size_t size, mirror::Object* object)
+ LOCKS_EXCLUDED(GlobalSynchronization::heap_bitmap_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ size_t RecordAllocationUninstrumented(size_t size, mirror::Object* object)
LOCKS_EXCLUDED(GlobalSynchronization::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -608,9 +654,6 @@
// Since the heap was created, how many objects have been freed.
size_t total_objects_freed_ever_;
- // Primitive objects larger than this size are put in the large object space.
- const size_t large_object_threshold_;
-
// Number of bytes allocated. Adjusted after each allocation and free.
AtomicInteger num_bytes_allocated_;
@@ -712,6 +755,16 @@
friend class ScopedHeapLock;
friend class space::SpaceTest;
+ class AllocationTimer {
+ private:
+ Heap* heap_;
+ mirror::Object** allocated_obj_ptr_;
+ uint64_t allocation_start_time_;
+ public:
+ AllocationTimer(Heap* heap, mirror::Object** allocated_obj_ptr);
+ ~AllocationTimer();
+ };
+
DISALLOW_IMPLICIT_CONSTRUCTORS(Heap);
};
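For reference, the AllocationTimer declared above is an RAII helper
that only does work when kMeasureAllocationTime is set. A minimal
sketch of its likely shape, assuming a NanoTime() clock and a
total_allocation_time_ counter on Heap (both assumptions, not shown
in this diff):

    Heap::AllocationTimer::AllocationTimer(Heap* heap, mirror::Object** allocated_obj_ptr)
        : heap_(heap), allocated_obj_ptr_(allocated_obj_ptr), allocation_start_time_(0) {
      if (kMeasureAllocationTime) {
        allocation_start_time_ = NanoTime();  // Record the start timestamp.
      }
    }

    Heap::AllocationTimer::~AllocationTimer() {
      // Charge the elapsed time only if measurement is compiled in and
      // the allocation actually produced an object.
      if (kMeasureAllocationTime && *allocated_obj_ptr_ != NULL) {
        heap_->total_allocation_time_ += NanoTime() - allocation_start_time_;
      }
    }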