Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2012 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 17 | #include "entrypoints/quick/quick_alloc_entrypoints.h" |
| 18 | |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 19 | #include "callee_save_frame.h" |
Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 20 | #include "entrypoints/entrypoint_utils-inl.h" |
Brian Carlstrom | ea46f95 | 2013-07-30 01:26:50 -0700 | [diff] [blame] | 21 | #include "mirror/art_method-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 22 | #include "mirror/class-inl.h" |
Ian Rogers | 2dd0e2c | 2013-01-24 12:42:14 -0800 | [diff] [blame] | 23 | #include "mirror/object_array-inl.h" |
Ian Rogers | 4f6ad8a | 2013-03-18 15:27:28 -0700 | [diff] [blame] | 24 | #include "mirror/object-inl.h" |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 25 | |
| 26 | namespace art { |
| 27 | |
// When true, the non-instrumented TLAB object-allocation entrypoints attempt
// an inline thread-local bump-pointer allocation before falling back to the
// shared slow path.
static constexpr bool kUseTlabFastPath = true;

// Generates the C entrypoints backing the quick allocation stubs for one
// (allocator, instrumentation) combination.  |suffix| names the allocator,
// |suffix2| is "Instrumented" or empty, |instrumented_bool| selects the
// instrumented slow-path helpers, and |allocator_type| is forwarded to the
// shared allocation helpers.  Every slow path first sets up the callee-save
// frame (FinishCalleeSaveFrameSetup) before code that may allocate/suspend.
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    /* Fast path: resolve the class from the dex cache (no access checks). */ \
    mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx); \
    if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        /* Fence before publishing so other threads see the stores above. */ \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    /* NOTE(review): finalizability is not re-checked here, unlike the */ \
    /* unresolved path above -- presumably callers only emit this entrypoint */ \
    /* for classes known non-finalizable; confirm against the compiler. */ \
    if (LIKELY(klass->IsInitialized())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerOrBrooksReadBarrier) { \
          if (kUseBrooksReadBarrier) { \
            obj->SetReadBarrierPointer(obj); \
          } \
          obj->AssertReadBarrierPointer(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocObjectFromCodeResolved<instrumented_bool>(klass, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  if (kUseTlabFastPath && !instrumented_bool && allocator_type == gc::kAllocatorTypeTLAB) { \
    /* Class is already known initialized; no class checks on the fast path. */ \
    size_t byte_count = klass->GetObjectSize(); \
    byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
    mirror::Object* obj; \
    if (LIKELY(byte_count < self->TlabSize())) { \
      obj = self->AllocTlab(byte_count); \
      DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
      obj->SetClass(klass); \
      if (kUseBakerOrBrooksReadBarrier) { \
        if (kUseBrooksReadBarrier) { \
          obj->SetReadBarrierPointer(obj); \
        } \
        obj->AssertReadBarrierPointer(); \
      } \
      QuasiAtomic::ThreadFenceForConstructor(); \
      return obj; \
    } \
  } \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocObjectFromCodeInitialized<instrumented_bool>(klass, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocArrayFromCode<false, instrumented_bool>(type_idx, method, component_count, self, \
                                                     allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, mirror::ArtMethod* method, int32_t component_count, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, method, component_count, self, \
                                                              allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  return AllocArrayFromCode<true, instrumented_bool>(type_idx, method, component_count, self, \
                                                    allocator_type); \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  /* Check-and-alloc has dedicated instrumented helper functions rather */ \
  /* than a template parameter, hence the runtime branch. */ \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, method, component_count, self, false, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, method, component_count, self, false, allocator_type); \
  } \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, mirror::ArtMethod* method, int32_t component_count, Thread* self, \
    StackReference<mirror::ArtMethod>* sp) \
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) { \
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsOnly); \
  if (!instrumented_bool) { \
    return CheckAndAllocArrayFromCode(type_idx, method, component_count, self, true, allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, method, component_count, self, true, allocator_type); \
  } \
}
| 164 | |
// Expands to both the instrumented and the non-instrumented entrypoint set
// for one allocator (the second expansion passes an empty |suffix2|).
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 168 | |
// Instantiate the artAlloc* C entrypoints for each supported allocator, in
// both instrumented and non-instrumented flavors.
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
Hiroshi Yamauchi | 3b4c189 | 2013-09-12 21:33:12 -0700 | [diff] [blame] | 173 | |
// For one allocator |suffix|, declares the art_quick_* stub symbols (normal
// and _instrumented variants; their definitions are not in this file --
// presumably per-architecture assembly, TODO confirm) and defines
// SetQuickAllocEntryPoints##suffix(), which installs one complete variant
// set into the QuickEntryPoints table.
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_array_resolved##suffix(void* klass, void*, int32_t); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, void* method); \
extern "C" void* art_quick_alloc_object_resolved##suffix(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_initialized##suffix(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, void* method); \
extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(void* klass, void*, int32_t); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, void*, int32_t); \
extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, void* method); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(void* klass, void* method); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, void* method); \
extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, void*, int32_t); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, void*, int32_t); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \
  } \
}
| 216 | |
// Generate the entrypoint setter functions for each allocator.  Skipped on
// 64-bit Apple hosts (__APPLE__ && __LP64__) -- presumably the referenced
// quick stubs are unavailable there; the same guard appears in
// ResetQuickAllocEntryPoints().
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc);
GENERATE_ENTRYPOINTS(_rosalloc);
GENERATE_ENTRYPOINTS(_bump_pointer);
GENERATE_ENTRYPOINTS(_tlab);
#endif
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 224 | |
// Pending entrypoint configuration: written by the two setters below,
// consumed the next time ResetQuickAllocEntryPoints() runs.
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;
| 227 | |
| 228 | void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) { |
| 229 | entry_points_allocator = allocator; |
| 230 | } |
| 231 | |
| 232 | void SetQuickAllocEntryPointsInstrumented(bool instrumented) { |
| 233 | entry_points_instrumented = instrumented; |
| 234 | } |
| 235 | |
| 236 | void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) { |
| 237 | switch (entry_points_allocator) { |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 238 | #if !defined(__APPLE__) || !defined(__LP64__) |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 239 | case gc::kAllocatorTypeDlMalloc: { |
| 240 | SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented); |
| 241 | break; |
| 242 | } |
| 243 | case gc::kAllocatorTypeRosAlloc: { |
| 244 | SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented); |
| 245 | break; |
| 246 | } |
| 247 | case gc::kAllocatorTypeBumpPointer: { |
| 248 | CHECK(kMovingCollector); |
| 249 | SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented); |
| 250 | break; |
| 251 | } |
| 252 | case gc::kAllocatorTypeTLAB: { |
| 253 | CHECK(kMovingCollector); |
| 254 | SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented); |
| 255 | break; |
| 256 | } |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 257 | #endif |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 258 | default: { |
| 259 | LOG(FATAL) << "Unimplemented"; |
| 260 | } |
| 261 | } |
| 262 | } |
| 263 | |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 264 | } // namespace art |