/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "entrypoints/quick/quick_alloc_entrypoints.h"

#include "art_method-inl.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"

namespace art {

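// When true, the uninstrumented entrypoints generated for the TLAB allocator inline a
// bump-pointer fast path and only call into the shared allocation helpers when the class is not
// trivially allocatable or the thread-local buffer cannot satisfy the request.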
static constexpr bool kUseTlabFastPath = true;

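// Generates the C++ allocation entrypoints for one (allocator, instrumentation) combination.
// `suffix` names the allocator (e.g. TLAB), `suffix2` is either `Instrumented` or empty,
// `instrumented_bool` selects the instrumented slow paths, and `allocator_type` is the
// gc::AllocatorType handed to the shared allocation helpers.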
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, suffix2, instrumented_bool, allocator_type) \
extern "C" mirror::Object* artAllocObjectFromCode##suffix##suffix2( \
    uint32_t type_idx, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \
    mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, kRuntimePointerSize); \
    if (LIKELY(klass != nullptr && klass->IsInitialized() && !klass->IsFinalizable())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerReadBarrier) { \
          obj->AssertReadBarrierState(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCode<false, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \
    if (LIKELY(klass->IsInitialized())) { \
      size_t byte_count = klass->GetObjectSize(); \
      byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
      mirror::Object* obj; \
      if (LIKELY(byte_count < self->TlabSize())) { \
        obj = self->AllocTlab(byte_count); \
        DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
        obj->SetClass(klass); \
        if (kUseBakerReadBarrier) { \
          obj->AssertReadBarrierState(); \
        } \
        QuasiAtomic::ThreadFenceForConstructor(); \
        return obj; \
      } \
    } \
  } \
  return AllocObjectFromCodeResolved<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeInitialized##suffix##suffix2( \
    mirror::Class* klass, ArtMethod* method ATTRIBUTE_UNUSED, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (kUseTlabFastPath && !(instrumented_bool) && (allocator_type) == gc::kAllocatorTypeTLAB) { \
    size_t byte_count = klass->GetObjectSize(); \
    byte_count = RoundUp(byte_count, gc::space::BumpPointerSpace::kAlignment); \
    mirror::Object* obj; \
    if (LIKELY(byte_count < self->TlabSize())) { \
      obj = self->AllocTlab(byte_count); \
      DCHECK(obj != nullptr) << "AllocTlab can't fail"; \
      obj->SetClass(klass); \
      if (kUseBakerReadBarrier) { \
        obj->AssertReadBarrierState(); \
      } \
      QuasiAtomic::ThreadFenceForConstructor(); \
      return obj; \
    } \
  } \
  return AllocObjectFromCodeInitialized<instrumented_bool>(klass, self, allocator_type); \
} \
extern "C" mirror::Object* artAllocObjectFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocObjectFromCode<true, instrumented_bool>(type_idx, method, self, allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<false, instrumented_bool>(type_idx, component_count, method, self, \
                                                      allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeResolved##suffix##suffix2( \
    mirror::Class* klass, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCodeResolved<false, instrumented_bool>(klass, component_count, method, \
                                                              self, allocator_type); \
} \
extern "C" mirror::Array* artAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  return AllocArrayFromCode<true, instrumented_bool>(type_idx, component_count, method, self, \
                                                     allocator_type); \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCode##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!(instrumented_bool)) { \
    return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, false, \
                                      allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, false, \
                                                  allocator_type); \
  } \
} \
extern "C" mirror::Array* artCheckAndAllocArrayFromCodeWithAccessCheck##suffix##suffix2( \
    uint32_t type_idx, int32_t component_count, ArtMethod* method, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  if (!(instrumented_bool)) { \
    return CheckAndAllocArrayFromCode(type_idx, component_count, method, self, true, \
                                      allocator_type); \
  } else { \
    return CheckAndAllocArrayFromCodeInstrumented(type_idx, component_count, method, self, true, \
                                                  allocator_type); \
  } \
} \
extern "C" mirror::String* artAllocStringFromBytesFromCode##suffix##suffix2( \
    mirror::ByteArray* byte_array, int32_t high, int32_t offset, int32_t byte_count, \
    Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  ScopedQuickEntrypointChecks sqec(self); \
  StackHandleScope<1> hs(self); \
  Handle<mirror::ByteArray> handle_array(hs.NewHandle(byte_array)); \
  return mirror::String::AllocFromByteArray<instrumented_bool>(self, byte_count, handle_array, \
                                                               offset, high, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromCharsFromCode##suffix##suffix2( \
    int32_t offset, int32_t char_count, mirror::CharArray* char_array, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::CharArray> handle_array(hs.NewHandle(char_array)); \
  return mirror::String::AllocFromCharArray<instrumented_bool>(self, char_count, handle_array, \
                                                               offset, allocator_type); \
} \
extern "C" mirror::String* artAllocStringFromStringFromCode##suffix##suffix2( /* NOLINT */ \
    mirror::String* string, Thread* self) \
    REQUIRES_SHARED(Locks::mutator_lock_) { \
  StackHandleScope<1> hs(self); \
  Handle<mirror::String> handle_string(hs.NewHandle(string)); \
  return mirror::String::AllocFromString<instrumented_bool>(self, handle_string->GetLength(), \
                                                            handle_string, 0, allocator_type); \
}

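// Instantiates both the instrumented and uninstrumented entrypoints for one allocator.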
#define GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(suffix, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, Instrumented, true, allocator_type) \
    GENERATE_ENTRYPOINTS_FOR_ALLOCATOR_INST(suffix, , false, allocator_type)

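// C++ entrypoints for each allocator the runtime may switch to.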
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(DlMalloc, gc::kAllocatorTypeDlMalloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RosAlloc, gc::kAllocatorTypeRosAlloc)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(BumpPointer, gc::kAllocatorTypeBumpPointer)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(TLAB, gc::kAllocatorTypeTLAB)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(Region, gc::kAllocatorTypeRegion)
GENERATE_ENTRYPOINTS_FOR_ALLOCATOR(RegionTLAB, gc::kAllocatorTypeRegionTLAB)

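// Declares the quick allocation stubs for one allocator suffix, in both plain and _instrumented
// variants, and defines SetQuickAllocEntryPoints##suffix, which points the QuickEntryPoints
// table at the corresponding set of stubs. The stubs themselves are implemented outside this
// file.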
#define GENERATE_ENTRYPOINTS(suffix) \
extern "C" void* art_quick_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix(mirror::Class* klass, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix(void*); \
extern "C" void* art_quick_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_resolved##suffix##_instrumented(mirror::Class* klass, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_resolved##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_initialized##suffix##_instrumented(mirror::Class* klass, ArtMethod* ref); \
extern "C" void* art_quick_alloc_object_with_access_check##suffix##_instrumented(uint32_t type_idx, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented(uint32_t, int32_t, ArtMethod* ref); \
extern "C" void* art_quick_alloc_string_from_bytes##suffix##_instrumented(void*, int32_t, int32_t, int32_t); \
extern "C" void* art_quick_alloc_string_from_chars##suffix##_instrumented(int32_t, int32_t, void*); \
extern "C" void* art_quick_alloc_string_from_string##suffix##_instrumented(void*); \
void SetQuickAllocEntryPoints##suffix(QuickEntryPoints* qpoints, bool instrumented) { \
  if (instrumented) { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix##_instrumented; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix##_instrumented; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix##_instrumented; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix##_instrumented; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix##_instrumented; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix##_instrumented; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix##_instrumented; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix##_instrumented; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix##_instrumented; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix##_instrumented; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix##_instrumented; \
  } else { \
    qpoints->pAllocArray = art_quick_alloc_array##suffix; \
    qpoints->pAllocArrayResolved = art_quick_alloc_array_resolved##suffix; \
    qpoints->pAllocArrayWithAccessCheck = art_quick_alloc_array_with_access_check##suffix; \
    qpoints->pAllocObject = art_quick_alloc_object##suffix; \
    qpoints->pAllocObjectResolved = art_quick_alloc_object_resolved##suffix; \
    qpoints->pAllocObjectInitialized = art_quick_alloc_object_initialized##suffix; \
    qpoints->pAllocObjectWithAccessCheck = art_quick_alloc_object_with_access_check##suffix; \
    qpoints->pCheckAndAllocArray = art_quick_check_and_alloc_array##suffix; \
    qpoints->pCheckAndAllocArrayWithAccessCheck = art_quick_check_and_alloc_array_with_access_check##suffix; \
    qpoints->pAllocStringFromBytes = art_quick_alloc_string_from_bytes##suffix; \
    qpoints->pAllocStringFromChars = art_quick_alloc_string_from_chars##suffix; \
    qpoints->pAllocStringFromString = art_quick_alloc_string_from_string##suffix; \
  } \
}

// Generate the entrypoint functions.
#if !defined(__APPLE__) || !defined(__LP64__)
GENERATE_ENTRYPOINTS(_dlmalloc)
GENERATE_ENTRYPOINTS(_rosalloc)
GENERATE_ENTRYPOINTS(_bump_pointer)
GENERATE_ENTRYPOINTS(_tlab)
GENERATE_ENTRYPOINTS(_region)
GENERATE_ENTRYPOINTS(_region_tlab)
#endif

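// Currently selected allocator and instrumentation state; applied to an entrypoint table by
// ResetQuickAllocEntryPoints below.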
static bool entry_points_instrumented = false;
static gc::AllocatorType entry_points_allocator = gc::kAllocatorTypeDlMalloc;

void SetQuickAllocEntryPointsAllocator(gc::AllocatorType allocator) {
  entry_points_allocator = allocator;
}

void SetQuickAllocEntryPointsInstrumented(bool instrumented) {
  entry_points_instrumented = instrumented;
}

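// Rewrites the allocation entries of the given QuickEntryPoints table to match the currently
// selected allocator and instrumentation state. Allocators without generated entrypoints (and
// 64-bit Apple builds, which generate none) hit UNIMPLEMENTED(FATAL).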
void ResetQuickAllocEntryPoints(QuickEntryPoints* qpoints) {
#if !defined(__APPLE__) || !defined(__LP64__)
  switch (entry_points_allocator) {
    case gc::kAllocatorTypeDlMalloc: {
      SetQuickAllocEntryPoints_dlmalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRosAlloc: {
      SetQuickAllocEntryPoints_rosalloc(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeBumpPointer: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_bump_pointer(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_tlab(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegion: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region(qpoints, entry_points_instrumented);
      return;
    }
    case gc::kAllocatorTypeRegionTLAB: {
      CHECK(kMovingCollector);
      SetQuickAllocEntryPoints_region_tlab(qpoints, entry_points_instrumented);
      return;
    }
    default:
      break;
  }
#else
  UNUSED(qpoints);
#endif
  UNIMPLEMENTED(FATAL);
  UNREACHABLE();
}

}  // namespace art