/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "space_test.h"
18
19#include "dlmalloc_space.h"
20#include "rosalloc_space.h"
21#include "scoped_thread_state_change.h"
22
23namespace art {
24namespace gc {
25namespace space {
26
// Allocator backend exercised by a given test instantiation; the fixture's
// CreateSpace() switches on this parameter.
enum MallocSpaceType {
  kMallocSpaceDlMalloc,  // Space backed by dlmalloc.
  kMallocSpaceRosAlloc,  // Space backed by ART's rosalloc allocator.
};
31
32class SpaceCreateTest : public SpaceTest<CommonRuntimeTestWithParam<MallocSpaceType>> {
33 public:
34 MallocSpace* CreateSpace(const std::string& name,
35 size_t initial_size,
36 size_t growth_limit,
37 size_t capacity,
38 uint8_t* requested_begin) {
39 const MallocSpaceType type = GetParam();
40 if (type == kMallocSpaceDlMalloc) {
41 return DlMallocSpace::Create(name,
42 initial_size,
43 growth_limit,
44 capacity,
45 requested_begin,
46 false);
47 }
48 DCHECK_EQ(static_cast<uint32_t>(type), static_cast<uint32_t>(kMallocSpaceRosAlloc));
49 return RosAllocSpace::Create(name,
50 initial_size,
51 growth_limit,
52 capacity,
53 requested_begin,
54 Runtime::Current()->GetHeap()->IsLowMemoryMode(),
55 false);
56 }
57};
58
59TEST_P(SpaceCreateTest, InitTestBody) {
60 // This will lead to error messages in the log.
61 ScopedLogSeverity sls(LogSeverity::FATAL);
62
63 {
64 // Init < max == growth
65 std::unique_ptr<Space> space(CreateSpace("test", 16 * MB, 32 * MB, 32 * MB, nullptr));
66 EXPECT_TRUE(space != nullptr);
67 // Init == max == growth
68 space.reset(CreateSpace("test", 16 * MB, 16 * MB, 16 * MB, nullptr));
69 EXPECT_TRUE(space != nullptr);
70 // Init > max == growth
71 space.reset(CreateSpace("test", 32 * MB, 16 * MB, 16 * MB, nullptr));
72 EXPECT_TRUE(space == nullptr);
73 // Growth == init < max
74 space.reset(CreateSpace("test", 16 * MB, 16 * MB, 32 * MB, nullptr));
75 EXPECT_TRUE(space != nullptr);
76 // Growth < init < max
77 space.reset(CreateSpace("test", 16 * MB, 8 * MB, 32 * MB, nullptr));
78 EXPECT_TRUE(space == nullptr);
79 // Init < growth < max
80 space.reset(CreateSpace("test", 8 * MB, 16 * MB, 32 * MB, nullptr));
81 EXPECT_TRUE(space != nullptr);
82 // Init < max < growth
83 space.reset(CreateSpace("test", 8 * MB, 32 * MB, 16 * MB, nullptr));
84 EXPECT_TRUE(space == nullptr);
85 }
86}
87
// TODO: This test is not very good, we should improve it.
// The test should do more allocations before the creation of the ZygoteSpace, and then do
// allocations after the ZygoteSpace is created. The test should also do some GCs to ensure that
// the GC works with the ZygoteSpace.
92TEST_P(SpaceCreateTest, ZygoteSpaceTestBody) {
93 size_t dummy;
94 MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
95 ASSERT_TRUE(space != nullptr);
96
97 // Make space findable to the heap, will also delete space when runtime is cleaned up
98 AddSpace(space);
99 Thread* self = Thread::Current();
100 ScopedObjectAccess soa(self);
101
102 // Succeeds, fits without adjusting the footprint limit.
103 size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
104 StackHandleScope<3> hs(soa.Self());
105 MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
106 self,
107 1 * MB,
108 &ptr1_bytes_allocated,
109 &ptr1_usable_size,
110 &ptr1_bytes_tl_bulk_allocated)));
111 EXPECT_TRUE(ptr1.Get() != nullptr);
112 EXPECT_LE(1U * MB, ptr1_bytes_allocated);
113 EXPECT_LE(1U * MB, ptr1_usable_size);
114 EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
115 EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);
116
117 // Fails, requires a higher footprint limit.
118 mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
119 EXPECT_TRUE(ptr2 == nullptr);
120
121 // Succeeds, adjusts the footprint.
122 size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
123 MutableHandle<mirror::Object> ptr3(hs.NewHandle(AllocWithGrowth(space,
124 self,
125 8 * MB,
126 &ptr3_bytes_allocated,
127 &ptr3_usable_size,
128 &ptr3_bytes_tl_bulk_allocated)));
129 EXPECT_TRUE(ptr3.Get() != nullptr);
130 EXPECT_LE(8U * MB, ptr3_bytes_allocated);
131 EXPECT_LE(8U * MB, ptr3_usable_size);
132 EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
133 EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);
134
135 // Fails, requires a higher footprint limit.
136 mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &dummy, nullptr, &dummy);
137 EXPECT_TRUE(ptr4 == nullptr);
138
139 // Also fails, requires a higher allowed footprint.
140 mirror::Object* ptr5 = space->AllocWithGrowth(self, 8 * MB, &dummy, nullptr, &dummy);
141 EXPECT_TRUE(ptr5 == nullptr);
142
143 // Release some memory.
144 size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
145 EXPECT_EQ(free3, ptr3_bytes_allocated);
146 EXPECT_EQ(free3, space->Free(self, ptr3.Assign(nullptr)));
147 EXPECT_LE(8U * MB, free3);
148
149 // Succeeds, now that memory has been freed.
150 size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
151 Handle<mirror::Object> ptr6(hs.NewHandle(AllocWithGrowth(space,
152 self,
153 9 * MB,
154 &ptr6_bytes_allocated,
155 &ptr6_usable_size,
156 &ptr6_bytes_tl_bulk_allocated)));
157 EXPECT_TRUE(ptr6.Get() != nullptr);
158 EXPECT_LE(9U * MB, ptr6_bytes_allocated);
159 EXPECT_LE(9U * MB, ptr6_usable_size);
160 EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
161 EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);
162
163 // Final clean up.
164 size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
165 space->Free(self, ptr1.Assign(nullptr));
166 EXPECT_LE(1U * MB, free1);
167
168 // Make sure that the zygote space isn't directly at the start of the space.
169 EXPECT_TRUE(space->Alloc(self, 1U * MB, &dummy, nullptr, &dummy) != nullptr);
170
171 gc::Heap* heap = Runtime::Current()->GetHeap();
172 space::Space* old_space = space;
173 heap->RemoveSpace(old_space);
174 heap->RevokeAllThreadLocalBuffers();
175 space::ZygoteSpace* zygote_space = space->CreateZygoteSpace("alloc space",
176 heap->IsLowMemoryMode(),
177 &space);
178 delete old_space;
179 // Add the zygote space.
180 AddSpace(zygote_space, false);
181
182 // Make space findable to the heap, will also delete space when runtime is cleaned up
183 AddSpace(space, false);
184
185 // Succeeds, fits without adjusting the footprint limit.
186 ptr1.Assign(Alloc(space,
187 self,
188 1 * MB,
189 &ptr1_bytes_allocated,
190 &ptr1_usable_size,
191 &ptr1_bytes_tl_bulk_allocated));
192 EXPECT_TRUE(ptr1.Get() != nullptr);
193 EXPECT_LE(1U * MB, ptr1_bytes_allocated);
194 EXPECT_LE(1U * MB, ptr1_usable_size);
195 EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
196 EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);
197
198 // Fails, requires a higher footprint limit.
199 ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
200 EXPECT_TRUE(ptr2 == nullptr);
201
202 // Succeeds, adjusts the footprint.
203 ptr3.Assign(AllocWithGrowth(space,
204 self,
205 2 * MB,
206 &ptr3_bytes_allocated,
207 &ptr3_usable_size,
208 &ptr3_bytes_tl_bulk_allocated));
209 EXPECT_TRUE(ptr3.Get() != nullptr);
210 EXPECT_LE(2U * MB, ptr3_bytes_allocated);
211 EXPECT_LE(2U * MB, ptr3_usable_size);
212 EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
213 EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);
214 space->Free(self, ptr3.Assign(nullptr));
215
216 // Final clean up.
217 free1 = space->AllocationSize(ptr1.Get(), nullptr);
218 space->Free(self, ptr1.Assign(nullptr));
219 EXPECT_LE(1U * MB, free1);
220}
221
222TEST_P(SpaceCreateTest, AllocAndFreeTestBody) {
223 size_t dummy = 0;
224 MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
225 ASSERT_TRUE(space != nullptr);
226 Thread* self = Thread::Current();
227 ScopedObjectAccess soa(self);
228
229 // Make space findable to the heap, will also delete space when runtime is cleaned up
230 AddSpace(space);
231
232 // Succeeds, fits without adjusting the footprint limit.
233 size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
234 StackHandleScope<3> hs(soa.Self());
235 MutableHandle<mirror::Object> ptr1(hs.NewHandle(Alloc(space,
236 self,
237 1 * MB,
238 &ptr1_bytes_allocated,
239 &ptr1_usable_size,
240 &ptr1_bytes_tl_bulk_allocated)));
241 EXPECT_TRUE(ptr1.Get() != nullptr);
242 EXPECT_LE(1U * MB, ptr1_bytes_allocated);
243 EXPECT_LE(1U * MB, ptr1_usable_size);
244 EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
245 EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);
246
247 // Fails, requires a higher footprint limit.
248 mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
249 EXPECT_TRUE(ptr2 == nullptr);
250
251 // Succeeds, adjusts the footprint.
252 size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
253 MutableHandle<mirror::Object> ptr3(hs.NewHandle(AllocWithGrowth(space,
254 self,
255 8 * MB,
256 &ptr3_bytes_allocated,
257 &ptr3_usable_size,
258 &ptr3_bytes_tl_bulk_allocated)));
259 EXPECT_TRUE(ptr3.Get() != nullptr);
260 EXPECT_LE(8U * MB, ptr3_bytes_allocated);
261 EXPECT_LE(8U * MB, ptr3_usable_size);
262 EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
263 EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);
264
265 // Fails, requires a higher footprint limit.
266 mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
267 EXPECT_TRUE(ptr4 == nullptr);
268
269 // Also fails, requires a higher allowed footprint.
270 mirror::Object* ptr5 = AllocWithGrowth(space, self, 8 * MB, &dummy, nullptr, &dummy);
271 EXPECT_TRUE(ptr5 == nullptr);
272
273 // Release some memory.
274 size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
275 EXPECT_EQ(free3, ptr3_bytes_allocated);
276 space->Free(self, ptr3.Assign(nullptr));
277 EXPECT_LE(8U * MB, free3);
278
279 // Succeeds, now that memory has been freed.
280 size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
281 Handle<mirror::Object> ptr6(hs.NewHandle(AllocWithGrowth(space,
282 self,
283 9 * MB,
284 &ptr6_bytes_allocated,
285 &ptr6_usable_size,
286 &ptr6_bytes_tl_bulk_allocated)));
287 EXPECT_TRUE(ptr6.Get() != nullptr);
288 EXPECT_LE(9U * MB, ptr6_bytes_allocated);
289 EXPECT_LE(9U * MB, ptr6_usable_size);
290 EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
291 EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);
292
293 // Final clean up.
294 size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
295 space->Free(self, ptr1.Assign(nullptr));
296 EXPECT_LE(1U * MB, free1);
297}
298
299TEST_P(SpaceCreateTest, AllocAndFreeListTestBody) {
300 MallocSpace* space(CreateSpace("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
301 ASSERT_TRUE(space != nullptr);
302
303 // Make space findable to the heap, will also delete space when runtime is cleaned up
304 AddSpace(space);
305 Thread* self = Thread::Current();
306 ScopedObjectAccess soa(self);
307
308 // Succeeds, fits without adjusting the max allowed footprint.
309 mirror::Object* lots_of_objects[1024];
310 for (size_t i = 0; i < arraysize(lots_of_objects); i++) {
311 size_t allocation_size, usable_size, bytes_tl_bulk_allocated;
312 size_t size_of_zero_length_byte_array = SizeOfZeroLengthByteArray();
313 lots_of_objects[i] = Alloc(space,
314 self,
315 size_of_zero_length_byte_array,
316 &allocation_size,
317 &usable_size,
318 &bytes_tl_bulk_allocated);
319 EXPECT_TRUE(lots_of_objects[i] != nullptr);
320 size_t computed_usable_size;
321 EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size));
322 EXPECT_EQ(usable_size, computed_usable_size);
323 EXPECT_TRUE(bytes_tl_bulk_allocated == 0 ||
324 bytes_tl_bulk_allocated >= allocation_size);
325 }
326
327 // Release memory.
328 space->FreeList(self, arraysize(lots_of_objects), lots_of_objects);
329
330 // Succeeds, fits by adjusting the max allowed footprint.
331 for (size_t i = 0; i < arraysize(lots_of_objects); i++) {
332 size_t allocation_size, usable_size, bytes_tl_bulk_allocated;
333 lots_of_objects[i] = AllocWithGrowth(space,
334 self,
335 1024,
336 &allocation_size,
337 &usable_size,
338 &bytes_tl_bulk_allocated);
339 EXPECT_TRUE(lots_of_objects[i] != nullptr);
340 size_t computed_usable_size;
341 EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size));
342 EXPECT_EQ(usable_size, computed_usable_size);
343 EXPECT_TRUE(bytes_tl_bulk_allocated == 0 ||
344 bytes_tl_bulk_allocated >= allocation_size);
345 }
346
347 // Release memory.
348 space->FreeList(self, arraysize(lots_of_objects), lots_of_objects);
349}
350
// Run every SpaceCreateTest case once per allocator backend.
INSTANTIATE_TEST_CASE_P(CreateRosAllocSpace,
                        SpaceCreateTest,
                        testing::Values(kMallocSpaceRosAlloc));
INSTANTIATE_TEST_CASE_P(CreateDlMallocSpace,
                        SpaceCreateTest,
                        testing::Values(kMallocSpaceDlMalloc));
357
358} // namespace space
359} // namespace gc
360} // namespace art