/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "gtest/gtest.h"

namespace art {

class ArenaAllocatorTest : public testing::Test {
 protected:
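  // Counts how many arenas the allocator has taken from the pool by walking
  // its internal arena list.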
  size_t NumberOfArenas(ArenaAllocator* arena) {
    size_t result = 0u;
    for (Arena* a = arena->arena_head_; a != nullptr; a = a->next_) {
      ++result;
    }
    return result;
  }
};

TEST_F(ArenaAllocatorTest, Test) {
  ArenaPool pool;
  ArenaAllocator arena(&pool);
  ArenaBitVector bv(&arena, 10, true);
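  // GetStorageSize() reports the number of storage words backing the vector
  // (presumably 32-bit words): bit 5 fits in the first word, while bit 35
  // forces an expansion to two words.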
  bv.SetBit(5);
  EXPECT_EQ(1U, bv.GetStorageSize());
  bv.SetBit(35);
  EXPECT_EQ(2U, bv.GetStorageSize());
}

TEST_F(ArenaAllocatorTest, MakeDefined) {
  // Regression test to make sure we mark the allocated area defined.
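  // Under a memory tool (e.g. ASan/Valgrind), memory released back to the pool
  // is poisoned; an allocation that reuses it must be re-marked as defined
  // before the reads below can succeed cleanly.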
  ArenaPool pool;
  static constexpr size_t kSmallArraySize = 10;
  static constexpr size_t kLargeArraySize = 50;
  uint32_t* small_array;
  {
    // Allocate a small array from an arena and release it.
    ArenaAllocator arena(&pool);
    small_array = arena.AllocArray<uint32_t>(kSmallArraySize);
    ASSERT_EQ(0u, small_array[kSmallArraySize - 1u]);
  }
  {
    // Reuse the previous arena and allocate more than the previous allocation, including its red zone.
    ArenaAllocator arena(&pool);
    uint32_t* large_array = arena.AllocArray<uint32_t>(kLargeArraySize);
    ASSERT_EQ(0u, large_array[kLargeArraySize - 1u]);
    // Verify that the allocation was made on the same arena.
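    // A matching address means the pool handed back the same arena, so the new
    // allocation overlaps the previously released memory, which is exactly the
    // situation this regression test covers.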
    ASSERT_EQ(small_array, large_array);
  }
}

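// Checks how allocations whose sizes approach or exceed Arena::kDefaultSize are
// distributed across arenas; the fractions below are chosen so that a pair of
// allocations either fits into one default-sized arena or forces a new one,
// while leaving slack for memory tool red zones.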
TEST_F(ArenaAllocatorTest, LargeAllocations) {
  {
    ArenaPool pool;
    ArenaAllocator arena(&pool);
    // Note: Leaving some space for memory tool red zones.
    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 5 / 8);
    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 2 / 8);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(1u, NumberOfArenas(&arena));
  }
  {
    ArenaPool pool;
    ArenaAllocator arena(&pool);
    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 11 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&arena));
    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 7 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(3u, NumberOfArenas(&arena));
  }
  {
    ArenaPool pool;
    ArenaAllocator arena(&pool);
    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&arena));
    // Note: Leaving some space for memory tool red zones.
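    // The 5/16 allocation should fit into the slack of the arena holding the
    // 9/16 allocation, so no third arena is needed.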
    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(2u, NumberOfArenas(&arena));
  }
  {
    ArenaPool pool;
    ArenaAllocator arena(&pool);
    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
    ASSERT_NE(alloc1, alloc2);
    ASSERT_EQ(2u, NumberOfArenas(&arena));
    // Note: Leaving some space for memory tool red zones.
    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
    ASSERT_NE(alloc1, alloc3);
    ASSERT_NE(alloc2, alloc3);
    ASSERT_EQ(2u, NumberOfArenas(&arena));
  }
  {
    ArenaPool pool;
    ArenaAllocator arena(&pool);
    // Note: Leaving some space for memory tool red zones.
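    // All 15 small allocations should be served from the first arena (15/16 of
    // the default size in total), while each oversized 17/16 allocation gets a
    // dedicated arena of its own.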
    for (size_t i = 0; i != 15; ++i) {
      arena.Alloc(Arena::kDefaultSize * 1 / 16);    // Allocate 15 times from the same arena.
      ASSERT_EQ(i + 1u, NumberOfArenas(&arena));
      arena.Alloc(Arena::kDefaultSize * 17 / 16);   // Allocate a separate arena.
      ASSERT_EQ(i + 2u, NumberOfArenas(&arena));
    }
  }
}

TEST_F(ArenaAllocatorTest, AllocAlignment) {
  ArenaPool pool;
  ArenaAllocator arena(&pool);
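  // Every allocation, regardless of requested size, should come back aligned to
  // ArenaAllocator::kAlignment; sweeping sizes from 1 to kAlignment + 1 over
  // several rounds exercises all padding offsets within the arena.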
  for (size_t iterations = 0; iterations <= 10; ++iterations) {
    for (size_t size = 1; size <= ArenaAllocator::kAlignment + 1; ++size) {
      void* allocation = arena.Alloc(size);
      EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(allocation))
          << reinterpret_cast<uintptr_t>(allocation);
    }
  }
}

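// Realloc() should preserve kAlignment both when it can grow the allocation in
// place and when it has to move it, and the next Alloc() afterwards must also
// return an aligned pointer.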
TEST_F(ArenaAllocatorTest, ReallocAlignment) {
  {
    // Case 1: small aligned allocation, aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator arena(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = arena.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
    // Secondary: expect the same buffer.
    EXPECT_EQ(original_allocation, realloc_allocation);

    void* after_alloc = arena.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 2: small aligned allocation, non-aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator arena(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2;
    void* original_allocation = arena.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
    // Secondary: expect the same buffer.
    EXPECT_EQ(original_allocation, realloc_allocation);

    void* after_alloc = arena.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 3: small non-aligned allocation, aligned extend inside arena.
    ArenaPool pool;
    ArenaAllocator arena(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = arena.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 4;
    void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
    // Secondary: expect the same buffer.
    EXPECT_EQ(original_allocation, realloc_allocation);

    void* after_alloc = arena.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 4: small non-aligned allocation, aligned non-extend inside arena.
    ArenaPool pool;
    ArenaAllocator arena(&pool);

    const size_t original_size = ArenaAllocator::kAlignment * 2 + (ArenaAllocator::kAlignment / 2);
    void* original_allocation = arena.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = ArenaAllocator::kAlignment * 3;
    void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
    // Secondary: expect the same buffer.
    EXPECT_EQ(original_allocation, realloc_allocation);

    void* after_alloc = arena.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  // The next cases are brittle: the default arena size can vary, and the extra
  // space taken by red zones under a memory tool (sanitizer) is not known here.

  {
    // Case 5: large allocation, aligned extend into next arena.
    ArenaPool pool;
    ArenaAllocator arena(&pool);

    const size_t original_size = Arena::kDefaultSize - ArenaAllocator::kAlignment * 5;
    void* original_allocation = arena.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = Arena::kDefaultSize + ArenaAllocator::kAlignment * 2;
    void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
    // Secondary: expect new buffer.
    EXPECT_NE(original_allocation, realloc_allocation);

    void* after_alloc = arena.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }

  {
    // Case 6: large allocation, non-aligned extend into next arena.
    ArenaPool pool;
    ArenaAllocator arena(&pool);

    const size_t original_size = Arena::kDefaultSize -
        ArenaAllocator::kAlignment * 4 -
        ArenaAllocator::kAlignment / 2;
    void* original_allocation = arena.Alloc(original_size);
    ASSERT_TRUE(IsAligned<ArenaAllocator::kAlignment>(original_allocation));

    const size_t new_size = Arena::kDefaultSize +
        ArenaAllocator::kAlignment * 2 +
        ArenaAllocator::kAlignment / 2;
    void* realloc_allocation = arena.Realloc(original_allocation, original_size, new_size);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(realloc_allocation));
    // Secondary: expect new buffer.
    EXPECT_NE(original_allocation, realloc_allocation);

    void* after_alloc = arena.Alloc(1);
    EXPECT_TRUE(IsAligned<ArenaAllocator::kAlignment>(after_alloc));
  }
}


}  // namespace art