Elliott Hughes | 2faa5f1 | 2012-01-30 14:42:07 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 16 | |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 17 | #ifndef ART_RUNTIME_GC_SPACE_SPACE_TEST_H_ |
| 18 | #define ART_RUNTIME_GC_SPACE_SPACE_TEST_H_ |
| 19 | |
Brian Carlstrom | a1ce1fe | 2014-02-24 23:23:58 -0800 | [diff] [blame] | 20 | #include <stdint.h> |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 21 | #include <memory> |
Brian Carlstrom | a1ce1fe | 2014-02-24 23:23:58 -0800 | [diff] [blame] | 22 | |
| 23 | #include "common_runtime_test.h" |
Brian Carlstrom | 578bbdc | 2011-07-21 14:07:47 -0700 | [diff] [blame] | 24 | #include "globals.h" |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 25 | #include "mirror/array-inl.h" |
| 26 | #include "mirror/object-inl.h" |
Ian Rogers | e63db27 | 2014-07-15 15:36:11 -0700 | [diff] [blame] | 27 | #include "scoped_thread_state_change.h" |
| 28 | #include "zygote_space.h" |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 29 | |
| 30 | namespace art { |
Ian Rogers | 1d54e73 | 2013-05-02 21:10:01 -0700 | [diff] [blame] | 31 | namespace gc { |
| 32 | namespace space { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 33 | |
Brian Carlstrom | a1ce1fe | 2014-02-24 23:23:58 -0800 | [diff] [blame] | 34 | class SpaceTest : public CommonRuntimeTest { |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 35 | public: |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 36 | jobject byte_array_class_; |
| 37 | |
| 38 | SpaceTest() : byte_array_class_(nullptr) { |
| 39 | } |
| 40 | |
Mathieu Chartier | 1b54f9c | 2014-04-30 16:45:02 -0700 | [diff] [blame] | 41 | void AddSpace(ContinuousSpace* space, bool revoke = true) { |
| 42 | Heap* heap = Runtime::Current()->GetHeap(); |
| 43 | if (revoke) { |
| 44 | heap->RevokeAllThreadLocalBuffers(); |
| 45 | } |
| 46 | heap->AddSpace(space); |
| 47 | heap->SetSpaceAsDefault(space); |
Ian Rogers | 1d54e73 | 2013-05-02 21:10:01 -0700 | [diff] [blame] | 48 | } |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 49 | |
| 50 | mirror::Class* GetByteArrayClass(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 51 | StackHandleScope<1> hs(self); |
| 52 | auto null_loader(hs.NewHandle<mirror::ClassLoader>(nullptr)); |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 53 | if (byte_array_class_ == nullptr) { |
| 54 | mirror::Class* byte_array_class = |
| 55 | Runtime::Current()->GetClassLinker()->FindClass(self, "[B", null_loader); |
| 56 | EXPECT_TRUE(byte_array_class != nullptr); |
| 57 | byte_array_class_ = self->GetJniEnv()->NewLocalRef(byte_array_class); |
| 58 | EXPECT_TRUE(byte_array_class_ != nullptr); |
| 59 | } |
| 60 | return reinterpret_cast<mirror::Class*>(self->DecodeJObject(byte_array_class_)); |
| 61 | } |
| 62 | |
| 63 | mirror::Object* Alloc(space::MallocSpace* alloc_space, Thread* self, size_t bytes, |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 64 | size_t* bytes_allocated, size_t* usable_size, |
| 65 | size_t* bytes_tl_bulk_allocated) |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 66 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 67 | StackHandleScope<1> hs(self); |
| 68 | Handle<mirror::Class> byte_array_class(hs.NewHandle(GetByteArrayClass(self))); |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 69 | mirror::Object* obj = alloc_space->Alloc(self, bytes, bytes_allocated, usable_size, |
| 70 | bytes_tl_bulk_allocated); |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 71 | if (obj != nullptr) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 72 | InstallClass(obj, byte_array_class.Get(), bytes); |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 73 | } |
| 74 | return obj; |
| 75 | } |
| 76 | |
| 77 | mirror::Object* AllocWithGrowth(space::MallocSpace* alloc_space, Thread* self, size_t bytes, |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 78 | size_t* bytes_allocated, size_t* usable_size, |
| 79 | size_t* bytes_tl_bulk_allocated) |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 80 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 81 | StackHandleScope<1> hs(self); |
| 82 | Handle<mirror::Class> byte_array_class(hs.NewHandle(GetByteArrayClass(self))); |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 83 | mirror::Object* obj = alloc_space->AllocWithGrowth(self, bytes, bytes_allocated, usable_size, |
| 84 | bytes_tl_bulk_allocated); |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 85 | if (obj != nullptr) { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 86 | InstallClass(obj, byte_array_class.Get(), bytes); |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 87 | } |
| 88 | return obj; |
| 89 | } |
| 90 | |
| 91 | void InstallClass(mirror::Object* o, mirror::Class* byte_array_class, size_t size) |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 92 | SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { |
Hiroshi Yamauchi | 4d2efce | 2014-02-10 16:19:09 -0800 | [diff] [blame] | 93 | // Note the minimum size, which is the size of a zero-length byte array. |
| 94 | EXPECT_GE(size, SizeOfZeroLengthByteArray()); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 95 | EXPECT_TRUE(byte_array_class != nullptr); |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 96 | o->SetClass(byte_array_class); |
Hiroshi Yamauchi | 800ac2d | 2014-04-02 17:32:54 -0700 | [diff] [blame] | 97 | if (kUseBakerOrBrooksReadBarrier) { |
| 98 | // Like the proper heap object allocation, install and verify |
| 99 | // the correct read barrier pointer. |
| 100 | if (kUseBrooksReadBarrier) { |
| 101 | o->SetReadBarrierPointer(o); |
| 102 | } |
| 103 | o->AssertReadBarrierPointer(); |
Hiroshi Yamauchi | 9d04a20 | 2014-01-31 13:35:49 -0800 | [diff] [blame] | 104 | } |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 105 | mirror::Array* arr = o->AsArray<kVerifyNone>(); |
Hiroshi Yamauchi | 4d2efce | 2014-02-10 16:19:09 -0800 | [diff] [blame] | 106 | size_t header_size = SizeOfZeroLengthByteArray(); |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 107 | int32_t length = size - header_size; |
| 108 | arr->SetLength(length); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 109 | EXPECT_EQ(arr->SizeOf<kVerifyNone>(), size); |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 110 | } |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 111 | |
Hiroshi Yamauchi | 4d2efce | 2014-02-10 16:19:09 -0800 | [diff] [blame] | 112 | static size_t SizeOfZeroLengthByteArray() { |
| 113 | return mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimByte)).Uint32Value(); |
| 114 | } |
| 115 | |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 116 | typedef MallocSpace* (*CreateSpaceFn)(const std::string& name, size_t initial_size, size_t growth_limit, |
Ian Rogers | 1373595 | 2014-10-08 12:43:28 -0700 | [diff] [blame] | 117 | size_t capacity, uint8_t* requested_begin); |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 118 | void InitTestBody(CreateSpaceFn create_space); |
| 119 | void ZygoteSpaceTestBody(CreateSpaceFn create_space); |
| 120 | void AllocAndFreeTestBody(CreateSpaceFn create_space); |
| 121 | void AllocAndFreeListTestBody(CreateSpaceFn create_space); |
| 122 | |
| 123 | void SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space, intptr_t object_size, |
| 124 | int round, size_t growth_limit); |
| 125 | void SizeFootPrintGrowthLimitAndTrimDriver(size_t object_size, CreateSpaceFn create_space); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 126 | }; |
Brian Carlstrom | 9b7f2c2 | 2011-09-27 14:35:04 -0700 | [diff] [blame] | 127 | |
// Deterministic pseudo-random generator (glibc-style linear congruential
// step) so test allocation patterns are reproducible across runs.
// Advances |*seed| in place and returns the new value.
static inline size_t test_rand(size_t* seed) {
  const size_t next_value = (*seed * 1103515245) + 12345;
  *seed = next_value;
  return next_value;
}
| 132 | |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 133 | void SpaceTest::InitTestBody(CreateSpaceFn create_space) { |
Andreas Gampe | 369810a | 2015-01-14 19:53:31 -0800 | [diff] [blame] | 134 | // This will lead to error messages in the log. |
| 135 | ScopedLogSeverity sls(LogSeverity::FATAL); |
| 136 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 137 | { |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 138 | // Init < max == growth |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 139 | std::unique_ptr<Space> space(create_space("test", 16 * MB, 32 * MB, 32 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 140 | EXPECT_TRUE(space.get() != nullptr); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 141 | } |
| 142 | { |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 143 | // Init == max == growth |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 144 | std::unique_ptr<Space> space(create_space("test", 16 * MB, 16 * MB, 16 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 145 | EXPECT_TRUE(space.get() != nullptr); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 146 | } |
| 147 | { |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 148 | // Init > max == growth |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 149 | std::unique_ptr<Space> space(create_space("test", 32 * MB, 16 * MB, 16 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 150 | EXPECT_TRUE(space.get() == nullptr); |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 151 | } |
| 152 | { |
| 153 | // Growth == init < max |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 154 | std::unique_ptr<Space> space(create_space("test", 16 * MB, 16 * MB, 32 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 155 | EXPECT_TRUE(space.get() != nullptr); |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 156 | } |
| 157 | { |
| 158 | // Growth < init < max |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 159 | std::unique_ptr<Space> space(create_space("test", 16 * MB, 8 * MB, 32 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 160 | EXPECT_TRUE(space.get() == nullptr); |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 161 | } |
| 162 | { |
| 163 | // Init < growth < max |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 164 | std::unique_ptr<Space> space(create_space("test", 8 * MB, 16 * MB, 32 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 165 | EXPECT_TRUE(space.get() != nullptr); |
jeffhao | c116070 | 2011-10-27 15:48:45 -0700 | [diff] [blame] | 166 | } |
| 167 | { |
| 168 | // Init < max < growth |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 169 | std::unique_ptr<Space> space(create_space("test", 8 * MB, 32 * MB, 16 * MB, nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 170 | EXPECT_TRUE(space.get() == nullptr); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 171 | } |
| 172 | } |
| 173 | |
// TODO: This test is not very good, we should improve it.
// The test should do more allocations before the creation of the ZygoteSpace, and then do
// allocations after the ZygoteSpace is created. The test should also do some GCs to ensure that
// the GC works with the ZygoteSpace.
//
// Overall flow: allocate/free in a fresh malloc space to exercise the
// footprint limit, then split the space into a zygote space plus a new alloc
// space and repeat a smaller allocate/free round in the new space.
void SpaceTest::ZygoteSpaceTestBody(CreateSpaceFn create_space) {
  size_t dummy;  // Receiver for out-params whose values this test ignores.
  MallocSpace* space(create_space("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
  // One scope for all three handles used across both rounds below.
  StackHandleScope<3> hs(soa.Self());
  MutableHandle<mirror::Object> ptr1(
      hs.NewHandle(Alloc(space, self, 1 * MB, &ptr1_bytes_allocated, &ptr1_usable_size,
                         &ptr1_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr1.Get() != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
  MutableHandle<mirror::Object> ptr3(
      hs.NewHandle(AllocWithGrowth(space, self, 8 * MB, &ptr3_bytes_allocated, &ptr3_usable_size,
                                   &ptr3_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr3.Get() != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = space->Alloc(self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = space->AllocWithGrowth(self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  // Assign(nullptr) clears the handle and yields the old pointer for Free().
  EXPECT_EQ(free3, space->Free(self, ptr3.Assign(nullptr)));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
  Handle<mirror::Object> ptr6(
      hs.NewHandle(AllocWithGrowth(space, self, 9 * MB, &ptr6_bytes_allocated, &ptr6_usable_size,
                                   &ptr6_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr6.Get() != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
  EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);

  // Make sure that the zygote space isn't directly at the start of the space.
  EXPECT_TRUE(space->Alloc(self, 1U * MB, &dummy, nullptr, &dummy) != nullptr);

  gc::Heap* heap = Runtime::Current()->GetHeap();
  space::Space* old_space = space;
  heap->RemoveSpace(old_space);
  heap->RevokeAllThreadLocalBuffers();
  // Split the old space: CreateZygoteSpace returns the zygote space and
  // re-points |space| at the new allocation space.
  space::ZygoteSpace* zygote_space = space->CreateZygoteSpace("alloc space",
                                                              heap->IsLowMemoryMode(),
                                                              &space);
  delete old_space;
  // Add the zygote space. revoke=false: thread-local buffers were already
  // revoked just above.
  AddSpace(zygote_space, false);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space, false);

  // Succeeds, fits without adjusting the footprint limit.
  ptr1.Assign(Alloc(space, self, 1 * MB, &ptr1_bytes_allocated, &ptr1_usable_size,
                    &ptr1_bytes_tl_bulk_allocated));
  EXPECT_TRUE(ptr1.Get() != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  ptr3.Assign(AllocWithGrowth(space, self, 2 * MB, &ptr3_bytes_allocated, &ptr3_usable_size,
                              &ptr3_bytes_tl_bulk_allocated));
  EXPECT_TRUE(ptr3.Get() != nullptr);
  EXPECT_LE(2U * MB, ptr3_bytes_allocated);
  EXPECT_LE(2U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);
  space->Free(self, ptr3.Assign(nullptr));

  // Final clean up.
  free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);
}
| 290 | |
// Exercises a malloc space's footprint limit: allocations that fit succeed,
// oversized ones fail until the footprint is grown (AllocWithGrowth) or
// memory is freed.
void SpaceTest::AllocAndFreeTestBody(CreateSpaceFn create_space) {
  size_t dummy = 0;  // Receiver for out-params whose values this test ignores.
  MallocSpace* space(create_space("test", 4 * MB, 16 * MB, 16 * MB, nullptr));
  ASSERT_TRUE(space != nullptr);
  Thread* self = Thread::Current();
  ScopedObjectAccess soa(self);

  // Make space findable to the heap, will also delete space when runtime is cleaned up
  AddSpace(space);

  // Succeeds, fits without adjusting the footprint limit.
  size_t ptr1_bytes_allocated, ptr1_usable_size, ptr1_bytes_tl_bulk_allocated;
  StackHandleScope<3> hs(soa.Self());
  MutableHandle<mirror::Object> ptr1(
      hs.NewHandle(Alloc(space, self, 1 * MB, &ptr1_bytes_allocated, &ptr1_usable_size,
                         &ptr1_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr1.Get() != nullptr);
  EXPECT_LE(1U * MB, ptr1_bytes_allocated);
  EXPECT_LE(1U * MB, ptr1_usable_size);
  EXPECT_LE(ptr1_usable_size, ptr1_bytes_allocated);
  EXPECT_EQ(ptr1_bytes_tl_bulk_allocated, ptr1_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr2 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr2 == nullptr);

  // Succeeds, adjusts the footprint.
  size_t ptr3_bytes_allocated, ptr3_usable_size, ptr3_bytes_tl_bulk_allocated;
  MutableHandle<mirror::Object> ptr3(
      hs.NewHandle(AllocWithGrowth(space, self, 8 * MB, &ptr3_bytes_allocated, &ptr3_usable_size,
                                   &ptr3_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr3.Get() != nullptr);
  EXPECT_LE(8U * MB, ptr3_bytes_allocated);
  EXPECT_LE(8U * MB, ptr3_usable_size);
  EXPECT_LE(ptr3_usable_size, ptr3_bytes_allocated);
  EXPECT_EQ(ptr3_bytes_tl_bulk_allocated, ptr3_bytes_allocated);

  // Fails, requires a higher footprint limit.
  mirror::Object* ptr4 = Alloc(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr4 == nullptr);

  // Also fails, requires a higher allowed footprint.
  mirror::Object* ptr5 = AllocWithGrowth(space, self, 8 * MB, &dummy, nullptr, &dummy);
  EXPECT_TRUE(ptr5 == nullptr);

  // Release some memory.
  size_t free3 = space->AllocationSize(ptr3.Get(), nullptr);
  EXPECT_EQ(free3, ptr3_bytes_allocated);
  // Assign(nullptr) clears the handle and yields the old pointer for Free().
  space->Free(self, ptr3.Assign(nullptr));
  EXPECT_LE(8U * MB, free3);

  // Succeeds, now that memory has been freed.
  size_t ptr6_bytes_allocated, ptr6_usable_size, ptr6_bytes_tl_bulk_allocated;
  Handle<mirror::Object> ptr6(
      hs.NewHandle(AllocWithGrowth(space, self, 9 * MB, &ptr6_bytes_allocated, &ptr6_usable_size,
                                   &ptr6_bytes_tl_bulk_allocated)));
  EXPECT_TRUE(ptr6.Get() != nullptr);
  EXPECT_LE(9U * MB, ptr6_bytes_allocated);
  EXPECT_LE(9U * MB, ptr6_usable_size);
  EXPECT_LE(ptr6_usable_size, ptr6_bytes_allocated);
  EXPECT_EQ(ptr6_bytes_tl_bulk_allocated, ptr6_bytes_allocated);

  // Final clean up.
  size_t free1 = space->AllocationSize(ptr1.Get(), nullptr);
  space->Free(self, ptr1.Assign(nullptr));
  EXPECT_LE(1U * MB, free1);
}
| 358 | |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 359 | void SpaceTest::AllocAndFreeListTestBody(CreateSpaceFn create_space) { |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 360 | MallocSpace* space(create_space("test", 4 * MB, 16 * MB, 16 * MB, nullptr)); |
| 361 | ASSERT_TRUE(space != nullptr); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 362 | |
| 363 | // Make space findable to the heap, will also delete space when runtime is cleaned up |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 364 | AddSpace(space); |
Ian Rogers | 50b35e2 | 2012-10-04 10:09:15 -0700 | [diff] [blame] | 365 | Thread* self = Thread::Current(); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 366 | ScopedObjectAccess soa(self); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 367 | |
| 368 | // Succeeds, fits without adjusting the max allowed footprint. |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 369 | mirror::Object* lots_of_objects[1024]; |
Elliott Hughes | b25c3f6 | 2012-03-26 16:35:06 -0700 | [diff] [blame] | 370 | for (size_t i = 0; i < arraysize(lots_of_objects); i++) { |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 371 | size_t allocation_size, usable_size, bytes_tl_bulk_allocated; |
Hiroshi Yamauchi | 4d2efce | 2014-02-10 16:19:09 -0800 | [diff] [blame] | 372 | size_t size_of_zero_length_byte_array = SizeOfZeroLengthByteArray(); |
Mathieu Chartier | 5647d18 | 2014-03-07 15:00:39 -0800 | [diff] [blame] | 373 | lots_of_objects[i] = Alloc(space, self, size_of_zero_length_byte_array, &allocation_size, |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 374 | &usable_size, &bytes_tl_bulk_allocated); |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 375 | EXPECT_TRUE(lots_of_objects[i] != nullptr); |
Ian Rogers | 6fac447 | 2014-02-25 17:01:10 -0800 | [diff] [blame] | 376 | size_t computed_usable_size; |
| 377 | EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size)); |
| 378 | EXPECT_EQ(usable_size, computed_usable_size); |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 379 | EXPECT_TRUE(bytes_tl_bulk_allocated == 0 || |
| 380 | bytes_tl_bulk_allocated >= allocation_size); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 381 | } |
| 382 | |
Mathieu Chartier | 73d1e17 | 2014-04-11 17:53:48 -0700 | [diff] [blame] | 383 | // Release memory. |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 384 | space->FreeList(self, arraysize(lots_of_objects), lots_of_objects); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 385 | |
| 386 | // Succeeds, fits by adjusting the max allowed footprint. |
Elliott Hughes | b25c3f6 | 2012-03-26 16:35:06 -0700 | [diff] [blame] | 387 | for (size_t i = 0; i < arraysize(lots_of_objects); i++) { |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 388 | size_t allocation_size, usable_size, bytes_tl_bulk_allocated; |
| 389 | lots_of_objects[i] = AllocWithGrowth(space, self, 1024, &allocation_size, &usable_size, |
| 390 | &bytes_tl_bulk_allocated); |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 391 | EXPECT_TRUE(lots_of_objects[i] != nullptr); |
Ian Rogers | 6fac447 | 2014-02-25 17:01:10 -0800 | [diff] [blame] | 392 | size_t computed_usable_size; |
| 393 | EXPECT_EQ(allocation_size, space->AllocationSize(lots_of_objects[i], &computed_usable_size)); |
| 394 | EXPECT_EQ(usable_size, computed_usable_size); |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 395 | EXPECT_TRUE(bytes_tl_bulk_allocated == 0 || |
| 396 | bytes_tl_bulk_allocated >= allocation_size); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 397 | } |
| 398 | |
Mathieu Chartier | 73d1e17 | 2014-04-11 17:53:48 -0700 | [diff] [blame] | 399 | // Release memory. |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 400 | space->FreeList(self, arraysize(lots_of_objects), lots_of_objects); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 401 | } |
| 402 | |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 403 | void SpaceTest::SizeFootPrintGrowthLimitAndTrimBody(MallocSpace* space, intptr_t object_size, |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 404 | int round, size_t growth_limit) { |
| 405 | if (((object_size > 0 && object_size >= static_cast<intptr_t>(growth_limit))) || |
| 406 | ((object_size < 0 && -object_size >= static_cast<intptr_t>(growth_limit)))) { |
| 407 | // No allocation can succeed |
| 408 | return; |
| 409 | } |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 410 | |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 411 | // The space's footprint equals amount of resources requested from system |
| 412 | size_t footprint = space->GetFootprint(); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 413 | |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 414 | // The space must at least have its book keeping allocated |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 415 | EXPECT_GT(footprint, 0u); |
| 416 | |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 417 | // But it shouldn't exceed the initial size |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 418 | EXPECT_LE(footprint, growth_limit); |
| 419 | |
| 420 | // space's size shouldn't exceed the initial size |
| 421 | EXPECT_LE(space->Size(), growth_limit); |
| 422 | |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 423 | // this invariant should always hold or else the space has grown to be larger than what the |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 424 | // space believes its size is (which will break invariants) |
| 425 | EXPECT_GE(space->Size(), footprint); |
| 426 | |
| 427 | // Fill the space with lots of small objects up to the growth limit |
| 428 | size_t max_objects = (growth_limit / (object_size > 0 ? object_size : 8)) + 1; |
Ian Rogers | 700a402 | 2014-05-19 16:49:03 -0700 | [diff] [blame] | 429 | std::unique_ptr<mirror::Object*[]> lots_of_objects(new mirror::Object*[max_objects]); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 430 | size_t last_object = 0; // last object for which allocation succeeded |
| 431 | size_t amount_allocated = 0; // amount of space allocated |
Ian Rogers | 50b35e2 | 2012-10-04 10:09:15 -0700 | [diff] [blame] | 432 | Thread* self = Thread::Current(); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 433 | ScopedObjectAccess soa(self); |
Mathieu Chartier | eb5710e | 2013-07-25 15:19:42 -0700 | [diff] [blame] | 434 | size_t rand_seed = 123456789; |
Elliott Hughes | b25c3f6 | 2012-03-26 16:35:06 -0700 | [diff] [blame] | 435 | for (size_t i = 0; i < max_objects; i++) { |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 436 | size_t alloc_fails = 0; // number of failed allocations |
| 437 | size_t max_fails = 30; // number of times we fail allocation before giving up |
| 438 | for (; alloc_fails < max_fails; alloc_fails++) { |
| 439 | size_t alloc_size; |
| 440 | if (object_size > 0) { |
| 441 | alloc_size = object_size; |
| 442 | } else { |
Mathieu Chartier | eb5710e | 2013-07-25 15:19:42 -0700 | [diff] [blame] | 443 | alloc_size = test_rand(&rand_seed) % static_cast<size_t>(-object_size); |
Hiroshi Yamauchi | 4d2efce | 2014-02-10 16:19:09 -0800 | [diff] [blame] | 444 | // Note the minimum size, which is the size of a zero-length byte array. |
| 445 | size_t size_of_zero_length_byte_array = SizeOfZeroLengthByteArray(); |
| 446 | if (alloc_size < size_of_zero_length_byte_array) { |
| 447 | alloc_size = size_of_zero_length_byte_array; |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 448 | } |
| 449 | } |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 450 | StackHandleScope<1> hs(soa.Self()); |
| 451 | auto object(hs.NewHandle<mirror::Object>(nullptr)); |
Mathieu Chartier | eb5710e | 2013-07-25 15:19:42 -0700 | [diff] [blame] | 452 | size_t bytes_allocated = 0; |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 453 | size_t bytes_tl_bulk_allocated; |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 454 | if (round <= 1) { |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 455 | object.Assign(Alloc(space, self, alloc_size, &bytes_allocated, nullptr, |
| 456 | &bytes_tl_bulk_allocated)); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 457 | } else { |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 458 | object.Assign(AllocWithGrowth(space, self, alloc_size, &bytes_allocated, nullptr, |
| 459 | &bytes_tl_bulk_allocated)); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 460 | } |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 461 | footprint = space->GetFootprint(); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 462 | EXPECT_GE(space->Size(), footprint); // invariant |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 463 | if (object.Get() != nullptr) { // allocation succeeded |
| 464 | lots_of_objects[i] = object.Get(); |
| 465 | size_t allocation_size = space->AllocationSize(object.Get(), nullptr); |
Mathieu Chartier | eb5710e | 2013-07-25 15:19:42 -0700 | [diff] [blame] | 466 | EXPECT_EQ(bytes_allocated, allocation_size); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 467 | if (object_size > 0) { |
| 468 | EXPECT_GE(allocation_size, static_cast<size_t>(object_size)); |
| 469 | } else { |
| 470 | EXPECT_GE(allocation_size, 8u); |
| 471 | } |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 472 | EXPECT_TRUE(bytes_tl_bulk_allocated == 0 || |
| 473 | bytes_tl_bulk_allocated >= allocation_size); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 474 | amount_allocated += allocation_size; |
| 475 | break; |
| 476 | } |
| 477 | } |
| 478 | if (alloc_fails == max_fails) { |
| 479 | last_object = i; |
| 480 | break; |
| 481 | } |
| 482 | } |
| 483 | CHECK_NE(last_object, 0u); // we should have filled the space |
| 484 | EXPECT_GT(amount_allocated, 0u); |
| 485 | |
| 486 | // We shouldn't have gone past the growth_limit |
| 487 | EXPECT_LE(amount_allocated, growth_limit); |
| 488 | EXPECT_LE(footprint, growth_limit); |
| 489 | EXPECT_LE(space->Size(), growth_limit); |
| 490 | |
| 491 | // footprint and size should agree with amount allocated |
| 492 | EXPECT_GE(footprint, amount_allocated); |
| 493 | EXPECT_GE(space->Size(), amount_allocated); |
| 494 | |
| 495 | // Release storage in a semi-adhoc manner |
| 496 | size_t free_increment = 96; |
Elliott Hughes | b25c3f6 | 2012-03-26 16:35:06 -0700 | [diff] [blame] | 497 | while (true) { |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 498 | { |
| 499 | ScopedThreadStateChange tsc(self, kNative); |
| 500 | // Give the space a haircut. |
| 501 | space->Trim(); |
| 502 | } |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 503 | |
| 504 | // Bounds sanity |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 505 | footprint = space->GetFootprint(); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 506 | EXPECT_LE(amount_allocated, growth_limit); |
| 507 | EXPECT_GE(footprint, amount_allocated); |
| 508 | EXPECT_LE(footprint, growth_limit); |
| 509 | EXPECT_GE(space->Size(), amount_allocated); |
| 510 | EXPECT_LE(space->Size(), growth_limit); |
| 511 | |
| 512 | if (free_increment == 0) { |
| 513 | break; |
| 514 | } |
| 515 | |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 516 | // Free some objects |
| 517 | for (size_t i = 0; i < last_object; i += free_increment) { |
| 518 | mirror::Object* object = lots_of_objects.get()[i]; |
| 519 | if (object == nullptr) { |
| 520 | continue; |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 521 | } |
Ian Rogers | 6fac447 | 2014-02-25 17:01:10 -0800 | [diff] [blame] | 522 | size_t allocation_size = space->AllocationSize(object, nullptr); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 523 | if (object_size > 0) { |
| 524 | EXPECT_GE(allocation_size, static_cast<size_t>(object_size)); |
| 525 | } else { |
| 526 | EXPECT_GE(allocation_size, 8u); |
| 527 | } |
| 528 | space->Free(self, object); |
| 529 | lots_of_objects.get()[i] = nullptr; |
| 530 | amount_allocated -= allocation_size; |
| 531 | footprint = space->GetFootprint(); |
| 532 | EXPECT_GE(space->Size(), footprint); // invariant |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 533 | } |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 534 | |
| 535 | free_increment >>= 1; |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 536 | } |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 537 | |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 538 | // The space has become empty here before allocating a large object |
| 539 | // below. For RosAlloc, revoke thread-local runs, which are kept |
| 540 | // even when empty for a performance reason, so that they won't |
| 541 | // cause the following large object allocation to fail due to |
| 542 | // potential fragmentation. Note they are normally revoked at each |
| 543 | // GC (but no GC here.) |
| 544 | space->RevokeAllThreadLocalBuffers(); |
| 545 | |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 546 | // All memory was released, try a large allocation to check freed memory is being coalesced |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 547 | StackHandleScope<1> hs(soa.Self()); |
| 548 | auto large_object(hs.NewHandle<mirror::Object>(nullptr)); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 549 | size_t three_quarters_space = (growth_limit / 2) + (growth_limit / 4); |
Mathieu Chartier | eb5710e | 2013-07-25 15:19:42 -0700 | [diff] [blame] | 550 | size_t bytes_allocated = 0; |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 551 | size_t bytes_tl_bulk_allocated; |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 552 | if (round <= 1) { |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 553 | large_object.Assign(Alloc(space, self, three_quarters_space, &bytes_allocated, nullptr, |
| 554 | &bytes_tl_bulk_allocated)); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 555 | } else { |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 556 | large_object.Assign(AllocWithGrowth(space, self, three_quarters_space, &bytes_allocated, |
Hiroshi Yamauchi | 4460a84 | 2015-03-09 11:57:48 -0700 | [diff] [blame^] | 557 | nullptr, &bytes_tl_bulk_allocated)); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 558 | } |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 559 | EXPECT_TRUE(large_object.Get() != nullptr); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 560 | |
| 561 | // Sanity check footprint |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 562 | footprint = space->GetFootprint(); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 563 | EXPECT_LE(footprint, growth_limit); |
| 564 | EXPECT_GE(space->Size(), footprint); |
| 565 | EXPECT_LE(space->Size(), growth_limit); |
| 566 | |
| 567 | // Clean up |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 568 | space->Free(self, large_object.Assign(nullptr)); |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 569 | |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 570 | // Sanity check footprint |
Hiroshi Yamauchi | cf58d4a | 2013-09-26 14:21:22 -0700 | [diff] [blame] | 571 | footprint = space->GetFootprint(); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 572 | EXPECT_LE(footprint, growth_limit); |
| 573 | EXPECT_GE(space->Size(), footprint); |
| 574 | EXPECT_LE(space->Size(), growth_limit); |
| 575 | } |
| 576 | |
Hiroshi Yamauchi | 3ddbd42 | 2013-12-06 17:43:36 -0800 | [diff] [blame] | 577 | void SpaceTest::SizeFootPrintGrowthLimitAndTrimDriver(size_t object_size, CreateSpaceFn create_space) { |
Hiroshi Yamauchi | 4d2efce | 2014-02-10 16:19:09 -0800 | [diff] [blame] | 578 | if (object_size < SizeOfZeroLengthByteArray()) { |
| 579 | // Too small for the object layout/model. |
| 580 | return; |
| 581 | } |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 582 | size_t initial_size = 4 * MB; |
| 583 | size_t growth_limit = 8 * MB; |
| 584 | size_t capacity = 16 * MB; |
Mathieu Chartier | 4e30541 | 2014-02-19 10:54:44 -0800 | [diff] [blame] | 585 | MallocSpace* space(create_space("test", initial_size, growth_limit, capacity, nullptr)); |
| 586 | ASSERT_TRUE(space != nullptr); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 587 | |
| 588 | // Basic sanity |
| 589 | EXPECT_EQ(space->Capacity(), growth_limit); |
| 590 | EXPECT_EQ(space->NonGrowthLimitCapacity(), capacity); |
| 591 | |
| 592 | // Make space findable to the heap, will also delete space when runtime is cleaned up |
Mathieu Chartier | 590fee9 | 2013-09-13 13:46:47 -0700 | [diff] [blame] | 593 | AddSpace(space); |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 594 | |
| 595 | // In this round we don't allocate with growth and therefore can't grow past the initial size. |
| 596 | // This effectively makes the growth_limit the initial_size, so assert this. |
| 597 | SizeFootPrintGrowthLimitAndTrimBody(space, object_size, 1, initial_size); |
| 598 | SizeFootPrintGrowthLimitAndTrimBody(space, object_size, 2, growth_limit); |
| 599 | // Remove growth limit |
| 600 | space->ClearGrowthLimit(); |
| 601 | EXPECT_EQ(space->Capacity(), capacity); |
| 602 | SizeFootPrintGrowthLimitAndTrimBody(space, object_size, 3, capacity); |
| 603 | } |
| 604 | |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 605 | #define TEST_SizeFootPrintGrowthLimitAndTrimStatic(name, spaceName, spaceFn, size) \ |
| 606 | TEST_F(spaceName##StaticTest, SizeFootPrintGrowthLimitAndTrim_AllocationsOf_##name) { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 607 | SizeFootPrintGrowthLimitAndTrimDriver(size, spaceFn); \ |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 608 | } |
| 609 | |
| 610 | #define TEST_SizeFootPrintGrowthLimitAndTrimRandom(name, spaceName, spaceFn, size) \ |
| 611 | TEST_F(spaceName##RandomTest, SizeFootPrintGrowthLimitAndTrim_RandomAllocationsWithMax_##name) { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 612 | SizeFootPrintGrowthLimitAndTrimDriver(-size, spaceFn); \ |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 613 | } |
| 614 | |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 615 | #define TEST_SPACE_CREATE_FN_BASE(spaceName, spaceFn) \ |
| 616 | class spaceName##BaseTest : public SpaceTest { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 617 | }; \ |
| 618 | \ |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 619 | TEST_F(spaceName##BaseTest, Init) { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 620 | InitTestBody(spaceFn); \ |
| 621 | } \ |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 622 | TEST_F(spaceName##BaseTest, ZygoteSpace) { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 623 | ZygoteSpaceTestBody(spaceFn); \ |
| 624 | } \ |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 625 | TEST_F(spaceName##BaseTest, AllocAndFree) { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 626 | AllocAndFreeTestBody(spaceFn); \ |
| 627 | } \ |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 628 | TEST_F(spaceName##BaseTest, AllocAndFreeList) { \ |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 629 | AllocAndFreeListTestBody(spaceFn); \ |
Andreas Gampe | 24651ec | 2014-02-27 13:26:16 -0800 | [diff] [blame] | 630 | } |
| 631 | |
| 632 | #define TEST_SPACE_CREATE_FN_STATIC(spaceName, spaceFn) \ |
| 633 | class spaceName##StaticTest : public SpaceTest { \ |
| 634 | }; \ |
| 635 | \ |
| 636 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(12B, spaceName, spaceFn, 12) \ |
| 637 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(16B, spaceName, spaceFn, 16) \ |
| 638 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(24B, spaceName, spaceFn, 24) \ |
| 639 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(32B, spaceName, spaceFn, 32) \ |
| 640 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(64B, spaceName, spaceFn, 64) \ |
| 641 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(128B, spaceName, spaceFn, 128) \ |
| 642 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(1KB, spaceName, spaceFn, 1 * KB) \ |
| 643 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(4KB, spaceName, spaceFn, 4 * KB) \ |
| 644 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(1MB, spaceName, spaceFn, 1 * MB) \ |
| 645 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(4MB, spaceName, spaceFn, 4 * MB) \ |
| 646 | TEST_SizeFootPrintGrowthLimitAndTrimStatic(8MB, spaceName, spaceFn, 8 * MB) |
| 647 | |
| 648 | #define TEST_SPACE_CREATE_FN_RANDOM(spaceName, spaceFn) \ |
| 649 | class spaceName##RandomTest : public SpaceTest { \ |
| 650 | }; \ |
| 651 | \ |
| 652 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(16B, spaceName, spaceFn, 16) \ |
| 653 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(24B, spaceName, spaceFn, 24) \ |
| 654 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(32B, spaceName, spaceFn, 32) \ |
| 655 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(64B, spaceName, spaceFn, 64) \ |
| 656 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(128B, spaceName, spaceFn, 128) \ |
| 657 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(1KB, spaceName, spaceFn, 1 * KB) \ |
| 658 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(4KB, spaceName, spaceFn, 4 * KB) \ |
| 659 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(1MB, spaceName, spaceFn, 1 * MB) \ |
| 660 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(4MB, spaceName, spaceFn, 4 * MB) \ |
| 661 | TEST_SizeFootPrintGrowthLimitAndTrimRandom(8MB, spaceName, spaceFn, 8 * MB) |
Ian Rogers | 3bb17a6 | 2012-01-27 23:56:44 -0800 | [diff] [blame] | 662 | |
Ian Rogers | 1d54e73 | 2013-05-02 21:10:01 -0700 | [diff] [blame] | 663 | } // namespace space |
| 664 | } // namespace gc |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 665 | } // namespace art |
Andreas Gampe | a743351 | 2014-02-21 13:19:23 -0800 | [diff] [blame] | 666 | |
| 667 | #endif // ART_RUNTIME_GC_SPACE_SPACE_TEST_H_ |