/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "art_method-inl.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"

namespace art {

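// If true, the uninstrumented entrypoints generated for gc::kAllocatorTypeTLAB first try to
// bump-allocate straight out of the calling thread's TLAB before falling back to the generic
// allocation helpers.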
static constexpr bool kUseTlabFastPath = true;

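// Generates one family of artAlloc*FromCode entrypoints for a given (instrumentation, allocator)
// combination. "suffix" names the allocator and "suffix2" marks the instrumented variant; both
// are pasted onto every generated function name.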
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCode ##suffix##suffix2( \
    uint32_t type_idx, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \
    mirror::Class* klass = method->GetDexCacheResolvedType<false>(dex::TypeIndex(type_idx), \
                                                                  kRuntimePointerSize); \
    if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerReadBarrier) { \
          obj->AssertReadBarrierState(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCode<false, instrumented_bool>(dex::TypeIndex(type_idx), \
                                                       method, \
                                                       self, \
                                                       allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \
    if (LIKELY(klass->IsInitialized())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerReadBarrier) { \
          obj->AssertReadBarrierState(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \
    size_t byte_count = klass->GetObjectSize(); \
    byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
    mirror::Object* obj; \
    if (LIKELY(byte_count < self->TlabSize())) { \
      obj = self->AllocTlab(byte_count); \
      DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
      obj->SetClass(klass); \
      if (kUseBakerReadBarrier) { \
        obj->AssertReadBarrierState(); \
      } \
      QuasiAtomic::ThreadFenceForConstructor(); \
      return obj; \
    } \
  } \
  return AllocObjectFromCodeInitialized<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocObjectFromCode<true, instrumented_bool>(dex::TypeIndex(type_idx), \
                                                      method, \
                                                      self, \
                                                      allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<false, instrumented_bool>(dex::TypeIndex(type_idx), \
                                                      component_count, \
                                                      method, \
                                                      self, \
                                                      allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, component_count, method, self, \
                                                              allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<true, instrumented_bool>(dex::TypeIndex(type_idx), \
                                                     component_count, \
                                                     method, \
                                                     self, \
                                                     allocator_type); \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!(instrumented_bool)) { \
    return CheckAndAllocArrayFromCode(dex::TypeIndex(type_idx), \
                                      component_count, \
                                      method, \
                                      self, \
                                      false, \
                                      allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(dex::TypeIndex(type_idx), \
                                                  component_count, \
                                                  method, \
                                                  self, \
                                                  false, \
                                                  allocator_type); \
  } \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!(instrumented_bool)) { \
    return CheckAndAllocArrayFromCode(dex::TypeIndex(type_idx), \
                                      component_count, \
                                      method, \
                                      self, \
                                      true, \
                                      allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(dex::TypeIndex(type_idx), \
                                                  component_count, \
                                                  method, \
                                                  self, \
                                                  true, \
                                                  allocator_type); \
  } \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>(self, byte_count, handle_array, \
                                                               offset, high, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>(self, char_count, handle_array, \
                                                               offset, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( /* NOLINT */ \
    mirror::String* string, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>(self, handle_string->GetLength(), \
                                                            handle_string, 0, allocator_type); \
}

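// Expands to both the instrumented and the uninstrumented entrypoint family for one allocator.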
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

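// One C++ entrypoint family per supported GC allocator type.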
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)

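// Forward-declares the art_quick_alloc_* stubs for one allocator and defines
// SetQuickAllocEntryPoints##suffix(), which installs either the instrumented or the plain stubs
// into the QuickEntryPoints table.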
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}

// Generate the entrypoint functions.
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif

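// Currently selected allocator and instrumentation state; ResetQuickAllocEntryPoints() reads
// these when (re)installing the allocation entrypoints.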
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;

void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}

void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}

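// Installs the entrypoint set that matches the currently selected allocator. Allocator types
// without generated entrypoints (and the 64-bit Apple host build, which compiles none of them)
// abort with UNIMPLEMENTED(FATAL).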
void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) {
#if !defined(__APPLE__) || !defined(__LP64__)
  switch (entry_points_allocator) {
    case gc::kAllocatorTypeDlMalloc: {
      SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRosAlloc: {
      SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeBumpPointer: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegion: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegionTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented);
      return;
    }
    default:
      break;
  }
#else
  UNUSED(qpoints);
#endif
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

}  // namespace art