/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 16 | |
#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "art_method-inl.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 26 | |
| 27 | namespace art { |
| 28 | |
Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 29 | static constexpr bool kUseTlabFastPath = true; |
| 30 | |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 31 | #define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \ |
| 32 | extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 33 | uint32_t type_idx, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 34 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 35 | ScopedQuickEntrypointChecks sqec(self); \ |
Chih-Hung Hsieh | fba3997 | 2016-05-11 11:26:48 -0700 | [diff] [blame] | 36 | if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \ |
Andreas Gampe | 542451c | 2016-07-26 09:02:02 -0700 | [diff] [blame] | 37 | mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, kRuntimePointerSize); \ |
Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 38 | if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \ |
| 39 | size_t byte_count = klass->GetObjectSize(); \ |
| 40 | byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \ |
| 41 | mirror::Object* obj; \ |
| 42 | if (LIKELY(byte_count < self->TlabSize())) { \ |
| 43 | obj = self->AllocTlab(byte_count); \ |
| 44 | DCHECK(obj != nullptr) << "AllocTlab can't fail"; \ |
| 45 | obj->SetClass(klass); \ |
| 46 | if (kUseBakerOrBrooksReadBarrier) { \ |
| 47 | if (kUseBrooksReadBarrier) { \ |
| 48 | obj->SetReadBarrierPointer(obj); \ |
| 49 | } \ |
| 50 | obj->AssertReadBarrierPointer(); \ |
| 51 | } \ |
| 52 | QuasiAtomic::ThreadFenceForConstructor(); \ |
| 53 | return obj; \ |
| 54 | } \ |
| 55 | } \ |
| 56 | } \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 57 | return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \ |
| 58 | } \ |
Hiroshi Yamauchi | be1ca55 | 2014-01-15 11:46:48 -0800 | [diff] [blame] | 59 | extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \ |
Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 60 | mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 61 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 62 | ScopedQuickEntrypointChecks sqec(self); \ |
Chih-Hung Hsieh | fba3997 | 2016-05-11 11:26:48 -0700 | [diff] [blame] | 63 | if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \ |
Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 64 | if (LIKELY(klass->IsInitialized())) { \ |
| 65 | size_t byte_count = klass->GetObjectSize(); \ |
| 66 | byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \ |
| 67 | mirror::Object* obj; \ |
| 68 | if (LIKELY(byte_count < self->TlabSize())) { \ |
| 69 | obj = self->AllocTlab(byte_count); \ |
| 70 | DCHECK(obj != nullptr) << "AllocTlab can't fail"; \ |
| 71 | obj->SetClass(klass); \ |
| 72 | if (kUseBakerOrBrooksReadBarrier) { \ |
| 73 | if (kUseBrooksReadBarrier) { \ |
| 74 | obj->SetReadBarrierPointer(obj); \ |
| 75 | } \ |
| 76 | obj->AssertReadBarrierPointer(); \ |
| 77 | } \ |
| 78 | QuasiAtomic::ThreadFenceForConstructor(); \ |
| 79 | return obj; \ |
| 80 | } \ |
| 81 | } \ |
| 82 | } \ |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 83 | return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \ |
Hiroshi Yamauchi | be1ca55 | 2014-01-15 11:46:48 -0800 | [diff] [blame] | 84 | } \ |
| 85 | extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \ |
Roland Levillain | 4b8f1ec | 2015-08-26 18:34:03 +0100 | [diff] [blame] | 86 | mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 87 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 88 | ScopedQuickEntrypointChecks sqec(self); \ |
Chih-Hung Hsieh | fba3997 | 2016-05-11 11:26:48 -0700 | [diff] [blame] | 89 | if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \ |
Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 90 | size_t byte_count = klass->GetObjectSize(); \ |
| 91 | byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \ |
| 92 | mirror::Object* obj; \ |
| 93 | if (LIKELY(byte_count < self->TlabSize())) { \ |
| 94 | obj = self->AllocTlab(byte_count); \ |
| 95 | DCHECK(obj != nullptr) << "AllocTlab can't fail"; \ |
| 96 | obj->SetClass(klass); \ |
| 97 | if (kUseBakerOrBrooksReadBarrier) { \ |
| 98 | if (kUseBrooksReadBarrier) { \ |
| 99 | obj->SetReadBarrierPointer(obj); \ |
| 100 | } \ |
| 101 | obj->AssertReadBarrierPointer(); \ |
| 102 | } \ |
| 103 | QuasiAtomic::ThreadFenceForConstructor(); \ |
| 104 | return obj; \ |
| 105 | } \ |
| 106 | } \ |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 107 | return AllocObjectFromCodeInitialized<instrumented_bool>(klass, self, allocator_type); \ |
Hiroshi Yamauchi | be1ca55 | 2014-01-15 11:46:48 -0800 | [diff] [blame] | 108 | } \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 109 | extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 110 | uint32_t type_idx, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 111 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 112 | ScopedQuickEntrypointChecks sqec(self); \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 113 | return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \ |
| 114 | } \ |
| 115 | extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 116 | uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 117 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 118 | ScopedQuickEntrypointChecks sqec(self); \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 119 | return AllocArrayFromCode<false, instrumented_bool>(type_idx, component_count, method, self, \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 120 | allocator_type); \ |
| 121 | } \ |
Hiroshi Yamauchi | bb8f0ab | 2014-01-27 16:50:29 -0800 | [diff] [blame] | 122 | extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 123 | mirror::Class* klass, int32_t component_count, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 124 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 125 | ScopedQuickEntrypointChecks sqec(self); \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 126 | return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, component_count, method, self, \ |
Hiroshi Yamauchi | bb8f0ab | 2014-01-27 16:50:29 -0800 | [diff] [blame] | 127 | allocator_type); \ |
| 128 | } \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 129 | extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 130 | uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 131 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 132 | ScopedQuickEntrypointChecks sqec(self); \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 133 | return AllocArrayFromCode<true, instrumented_bool>(type_idx, component_count, method, self, \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 134 | allocator_type); \ |
| 135 | } \ |
| 136 | extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 137 | uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 138 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 139 | ScopedQuickEntrypointChecks sqec(self); \ |
Chih-Hung Hsieh | fba3997 | 2016-05-11 11:26:48 -0700 | [diff] [blame] | 140 | if (!(instrumented_bool)) { \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 141 | return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, false, allocator_type); \ |
Hiroshi Yamauchi | cbbb080 | 2013-11-21 12:42:36 -0800 | [diff] [blame] | 142 | } else { \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 143 | return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, false, allocator_type); \ |
Hiroshi Yamauchi | cbbb080 | 2013-11-21 12:42:36 -0800 | [diff] [blame] | 144 | } \ |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 145 | } \ |
| 146 | extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 147 | uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 148 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 149 | ScopedQuickEntrypointChecks sqec(self); \ |
Chih-Hung Hsieh | fba3997 | 2016-05-11 11:26:48 -0700 | [diff] [blame] | 150 | if (!(instrumented_bool)) { \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 151 | return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, true, allocator_type); \ |
Hiroshi Yamauchi | cbbb080 | 2013-11-21 12:42:36 -0800 | [diff] [blame] | 152 | } else { \ |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 153 | return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, true, allocator_type); \ |
Hiroshi Yamauchi | cbbb080 | 2013-11-21 12:42:36 -0800 | [diff] [blame] | 154 | } \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 155 | } \ |
| 156 | extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \ |
| 157 | mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \ |
| 158 | Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 159 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 160 | ScopedQuickEntrypointChecks sqec(self); \ |
| 161 | StackHandleScope<1> hs(self); \ |
| 162 | Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \ |
| 163 | return mirror::String::AllocFromByteArray<instrumented_bool>(self, byte_count, handle_array, \ |
| 164 | offset, high, allocator_type); \ |
| 165 | } \ |
| 166 | extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \ |
| 167 | int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 168 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 169 | StackHandleScope<1> hs(self); \ |
| 170 | Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \ |
| 171 | return mirror::String::AllocFromCharArray<instrumented_bool>(self, char_count, handle_array, \ |
| 172 | offset, allocator_type); \ |
| 173 | } \ |
Chih-Hung Hsieh | fba3997 | 2016-05-11 11:26:48 -0700 | [diff] [blame] | 174 | extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( /* NOLINT */ \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 175 | mirror::String* string, Thread* self) \ |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 176 | SHARED_REQUIRES(Locks::mutator_lock_) { \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 177 | StackHandleScope<1> hs(self); \ |
| 178 | Handle<mirror::String> handle_string(hs.NewHandle(string)); \ |
| 179 | return mirror::String::AllocFromString<instrumented_bool>(self, handle_string->GetLength(), \ |
| 180 | handle_string, 0, allocator_type); \ |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 181 | } |
| 182 | |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 183 | #define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \ |
| 184 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \ |
| 185 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type) |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 186 | |
Mathieu Chartier | e6da9af | 2013-12-16 11:54:42 -0800 | [diff] [blame] | 187 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc) |
| 188 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc) |
Mathieu Chartier | cbb2d20 | 2013-11-14 17:45:16 -0800 | [diff] [blame] | 189 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer) |
Mathieu Chartier | 692fafd | 2013-11-29 17:24:40 -0800 | [diff] [blame] | 190 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB) |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 191 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion) |
| 192 | GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB) |
Hiroshi Yamauchi | 3b4c189 | 2013-09-12 21:33:12 -0700 | [diff] [blame] | 193 | |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 194 | #define GENERATE_ENTRYPOINTS(suffix) \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 195 | extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \ |
| 196 | extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, ArtMethod* ref); \ |
| 197 | extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \ |
| 198 | extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, ArtMethod* ref); \ |
| 199 | extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, ArtMethod* ref); \ |
| 200 | extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, ArtMethod* ref); \ |
| 201 | extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, ArtMethod* ref); \ |
| 202 | extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \ |
| 203 | extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 204 | extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \ |
| 205 | extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \ |
| 206 | extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \ |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 207 | extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ |
| 208 | extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, ArtMethod* ref); \ |
| 209 | extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ |
| 210 | extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \ |
| 211 | extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \ |
| 212 | extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \ |
| 213 | extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \ |
| 214 | extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ |
| 215 | extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 216 | extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \ |
| 217 | extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \ |
| 218 | extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \ |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 219 | void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \ |
| 220 | if (instrumented) { \ |
| 221 | qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \ |
| 222 | qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \ |
| 223 | qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \ |
| 224 | qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \ |
| 225 | qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \ |
| 226 | qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \ |
| 227 | qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \ |
| 228 | qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \ |
| 229 | qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 230 | qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \ |
| 231 | qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \ |
| 232 | qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \ |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 233 | } else { \ |
| 234 | qpoints->pAllocArray = art_quick_alloc_array##suffix; \ |
| 235 | qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \ |
| 236 | qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \ |
| 237 | qpoints->pAllocObject = art_quick_alloc_object##suffix; \ |
| 238 | qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \ |
| 239 | qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \ |
| 240 | qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \ |
| 241 | qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \ |
| 242 | qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \ |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 243 | qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \ |
| 244 | qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \ |
| 245 | qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \ |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 246 | } \ |
| 247 | } |
| 248 | |
| 249 | // Generate the entrypoint functions. |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 250 | #if !defined(__APPLE__) || !defined(__LP64__) |
Andreas Gampe | c8ccf68 | 2014-09-29 20:07:43 -0700 | [diff] [blame] | 251 | GENERATE_ENTRYPOINTS(_dlmalloc) |
| 252 | GENERATE_ENTRYPOINTS(_rosalloc) |
| 253 | GENERATE_ENTRYPOINTS(_bump_pointer) |
| 254 | GENERATE_ENTRYPOINTS(_tlab) |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 255 | GENERATE_ENTRYPOINTS(_region) |
| 256 | GENERATE_ENTRYPOINTS(_region_tlab) |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 257 | #endif |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 258 | |
| 259 | static bool entry_points_instrumented = false; |
| 260 | static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc; |
| 261 | |
| 262 | void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) { |
| 263 | entry_points_allocator = allocator; |
| 264 | } |
| 265 | |
| 266 | void SetQuickAllocEntryPointsInstrumented(bool instrumented) { |
| 267 | entry_points_instrumented = instrumented; |
| 268 | } |
| 269 | |
| 270 | void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) { |
Andreas Gampe | 48cc32c | 2015-04-07 02:53:04 +0000 | [diff] [blame] | 271 | #if !defined(__APPLE__) || !defined(__LP64__) |
Ian Rogers | de2db52 | 2014-11-04 14:43:18 -0800 | [diff] [blame] | 272 | switch (entry_points_allocator) { |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 273 | case gc::kAllocatorTypeDlMalloc: { |
| 274 | SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 275 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 276 | } |
| 277 | case gc::kAllocatorTypeRosAlloc: { |
| 278 | SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 279 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 280 | } |
| 281 | case gc::kAllocatorTypeBumpPointer: { |
| 282 | CHECK(kMovingCollector); |
| 283 | SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 284 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 285 | } |
| 286 | case gc::kAllocatorTypeTLAB: { |
| 287 | CHECK(kMovingCollector); |
| 288 | SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented); |
Ian Rogers | 7dc9c81 | 2014-11-04 15:10:55 -0800 | [diff] [blame] | 289 | return; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 290 | } |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 291 | case gc::kAllocatorTypeRegion: { |
| 292 | CHECK(kMovingCollector); |
| 293 | SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented); |
| 294 | return; |
| 295 | } |
| 296 | case gc::kAllocatorTypeRegionTLAB: { |
| 297 | CHECK(kMovingCollector); |
| 298 | SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented); |
| 299 | return; |
| 300 | } |
Andreas Gampe | 48cc32c | 2015-04-07 02:53:04 +0000 | [diff] [blame] | 301 | default: |
| 302 | break; |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 303 | } |
Andreas Gampe | 48cc32c | 2015-04-07 02:53:04 +0000 | [diff] [blame] | 304 | #else |
| 305 | UNUSED(qpoints); |
| 306 | #endif |
| 307 | UNIMPLEMENTED(FATAL); |
Ian Rogers | de2db52 | 2014-11-04 14:43:18 -0800 | [diff] [blame] | 308 | UNREACHABLE(); |
Mathieu Chartier | d889178 | 2014-03-02 13:28:37 -0800 | [diff] [blame] | 309 | } |
| 310 | |
Ian Rogers | 57b86d4 | 2012-03-27 16:05:41 -0700 | [diff] [blame] | 311 | } // namespace art |