/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_
#define ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_

#include "base/logging.h"
#include "base/macros.h"
#include "utils/arena_allocator.h"
#include "utils/debug_stack.h"
#include "globals.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Holds a list of Arenas for use by the ScopedArenaAllocator stack.
class ArenaStack : private DebugStackRefCounter {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(running_on_valgrind_)) {
      return AllocValgrind(bytes, kind);
    }
    // Bump-pointer allocation: round the size up to a 4-byte boundary and carve it
    // out of the top arena, moving on to the next arena if it does not fit.
    size_t rounded_bytes = (bytes + 3) & ~3;
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocValgrind(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  const bool running_on_valgrind_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};
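
// A minimal usage sketch (the names and block structure below are illustrative, not part of
// this header): ScopedArenaAllocators created on the same ArenaStack nest like stack frames,
// and destroying one reclaims everything allocated through it since its creation.
//
//   ArenaPool pool;
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator outer(&arena_stack);
//     void* a = outer.Alloc(16, kArenaAllocMisc);
//     {
//       ScopedArenaAllocator inner(&arena_stack);
//       void* b = inner.Alloc(32, kArenaAllocMisc);
//     }  // 'b' is reclaimed here.
//   }    // 'a' is reclaimed here.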

class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }
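  //
  // A minimal sketch of the Start()/End() case mentioned above (MyPass, allocator_ and
  // arena_stack_ are illustrative names, not part of this header):
  //
  //   void MyPass::Start() { allocator_ = ScopedArenaAllocator::Create(&arena_stack_); }
  //   void MyPass::End() { delete allocator_; }
  //
  // The delete-expression runs the destructor, which releases the allocator's memory back
  // to the ArenaStack; the custom operator delete below keeps it from deallocating anything
  // else.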

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  // ScopedArenaAllocatorAdapter is incomplete here, so Adapter() must be defined later.
  ScopedArenaAllocatorAdapter<void> Adapter();

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr) { UNUSED(ptr); }

 private:
  ArenaStack* const arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};

template <>
class ScopedArenaAllocatorAdapter<void>
    : private DebugStackReference, private DebugStackIndirectTopRef {
 public:
  typedef void value_type;
  typedef void* pointer;
  typedef const void* const_pointer;

  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator)
      : DebugStackReference(arena_allocator),
        DebugStackIndirectTopRef(arena_allocator),
        arena_stack_(arena_allocator->arena_stack_) {
  }
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
  ~ScopedArenaAllocatorAdapter() = default;

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;
};

// Adapter for use of ScopedArenaAllocator in STL containers.
template <typename T>
class ScopedArenaAllocatorAdapter : private DebugStackReference, private DebugStackIndirectTopRef {
 public:
  typedef T value_type;
  typedef T* pointer;
  typedef T& reference;
  typedef const T* const_pointer;
  typedef const T& const_reference;
  typedef size_t size_type;
  typedef ptrdiff_t difference_type;

  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator)
      : DebugStackReference(arena_allocator),
        DebugStackIndirectTopRef(arena_allocator),
        arena_stack_(arena_allocator->arena_stack_) {
  }
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter& other) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter& other) = default;
  ~ScopedArenaAllocatorAdapter() = default;

  size_type max_size() const {
    return static_cast<size_type>(-1) / sizeof(T);
  }

  pointer address(reference x) const { return &x; }
  const_pointer address(const_reference x) const { return &x; }

  pointer allocate(size_type n, ScopedArenaAllocatorAdapter<void>::pointer hint = nullptr) {
    DCHECK_LE(n, max_size());
    DebugStackIndirectTopRef::CheckTop();
    return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T), kArenaAllocSTL));
  }
  void deallocate(pointer p, size_type n) {
    DebugStackIndirectTopRef::CheckTop();
  }

  void construct(pointer p, const_reference val) {
    DebugStackIndirectTopRef::CheckTop();
    new (static_cast<void*>(p)) value_type(val);
  }
  void destroy(pointer p) {
    DebugStackIndirectTopRef::CheckTop();
    p->~value_type();
  }

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;
};

inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter() {
  return ScopedArenaAllocatorAdapter<void>(this);
}
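
// A minimal sketch of using the adapter with an STL container (assumes a ScopedArenaAllocator
// named 'allocator' and #include <vector>; both are illustrative, not part of this header):
//
//   std::vector<int, ScopedArenaAllocatorAdapter<int>> values(allocator.Adapter());
//   values.push_back(42);  // The element storage is carved out of the underlying ArenaStack.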

}  // namespace art

#endif  // ART_COMPILER_UTILS_SCOPED_ARENA_ALLOCATOR_H_