/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_method-inl.h"
#include "base/enums.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "stack_map.h"
#include "thread.h"

namespace art {

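// Walks the InlineInfo of the outer method to find the ArtMethod that was inlined at the given
// depth, resolving it (and updating the outer method's dex cache) if the cache entry is still
// the runtime resolution method.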
inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    const InlineInfo& inline_info,
                                    const InlineInfoEncoding& encoding,
                                    uint8_t inlining_depth)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This method is being used by artQuickResolutionTrampoline, before it sets up
  // the passed parameters in a GC friendly way. Therefore we must never be
  // suspended while executing it.
  ScopedAssertNoThreadSuspension sants(__FUNCTION__);

  uint32_t method_index = inline_info.GetMethodIndexAtDepth(encoding, inlining_depth);
  InvokeType invoke_type = static_cast<InvokeType>(
      inline_info.GetInvokeTypeAtDepth(encoding, inlining_depth));
  ArtMethod* inlined_method = outer_method->GetDexCacheResolvedMethod(method_index,
                                                                      kRuntimePointerSize);
  if (!inlined_method->IsRuntimeMethod()) {
    return inlined_method;
  }

  // The method in the dex cache is the runtime method responsible for invoking
  // the stub that will then update the dex cache. Therefore, we need to do the
  // resolution ourselves.

  // We first find the dex cache of our caller. If it is the outer method, we can directly
  // use its dex cache. Otherwise, we also need to resolve our caller.
  ArtMethod* caller = outer_method;
  if (inlining_depth != 0) {
    caller = GetResolvedMethod(outer_method,
                               inline_info,
                               encoding,
                               inlining_depth - 1);
  }
  DCHECK_EQ(caller->GetDexCache(), outer_method->GetDexCache())
      << "Compiler only supports inlining calls within the same dex cache";
  const DexFile* dex_file = outer_method->GetDexFile();
  const DexFile::MethodId& method_id = dex_file->GetMethodId(method_index);

  if (inline_info.GetDexPcAtDepth(encoding, inlining_depth) == static_cast<uint32_t>(-1)) {
    // "charAt" special case. It is the only non-leaf method we inline across dex files.
    if (kIsDebugBuild) {
      const char* name = dex_file->StringDataByIdx(method_id.name_idx_);
      DCHECK_EQ(std::string(name), "charAt");
      DCHECK_EQ(std::string(dex_file->GetMethodShorty(method_id)), "CI")
          << std::string(dex_file->GetMethodShorty(method_id));
      DCHECK_EQ(std::string(dex_file->StringByTypeIdx(method_id.class_idx_)), "Ljava/lang/String;")
          << std::string(dex_file->StringByTypeIdx(method_id.class_idx_));
    }
    mirror::Class* cls =
        Runtime::Current()->GetClassLinker()->GetClassRoot(ClassLinker::kJavaLangString);
    // Update the dex cache for future lookups.
    caller->GetDexCache()->SetResolvedType(method_id.class_idx_, cls);
    inlined_method = cls->FindVirtualMethod("charAt", "(I)C", kRuntimePointerSize);
  } else {
    mirror::Class* klass = caller->GetDexCache()->GetResolvedType(method_id.class_idx_);
    DCHECK_EQ(klass->GetDexCache(), caller->GetDexCache())
        << "Compiler only supports inlining calls within the same dex cache";
    switch (invoke_type) {
      case kDirect:
      case kStatic:
        inlined_method =
            klass->FindDirectMethod(klass->GetDexCache(), method_index, kRuntimePointerSize);
        break;
      case kSuper:
      case kVirtual:
        inlined_method =
            klass->FindVirtualMethod(klass->GetDexCache(), method_index, kRuntimePointerSize);
        break;
      default:
        LOG(FATAL) << "Unimplemented inlined invocation type: " << invoke_type;
        UNREACHABLE();
    }
  }

  // Update the dex cache for future lookups. Note that for static methods, this is safe
  // when the class is being initialized, as the entrypoint for the ArtMethod is at
  // this point still the resolution trampoline.
  outer_method->SetDexCacheResolvedMethod(method_index, inlined_method, kRuntimePointerSize);
  return inlined_method;
}

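// Returns the caller of the callee-save frame currently on top of this thread's quick stack.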
inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type) {
  return GetCalleeSaveMethodCaller(
      self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */);
}

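// Checks that klass may be allocated from compiled code: throws InstantiationError if it is not
// instantiable, IllegalAccessError if it is java.lang.Class, and runs the class initializer if
// needed. Sets *slow_path when the caller must null-check the result and re-read the allocator.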
ALWAYS_INLINE inline mirror::Class* CheckObjectAlloc(mirror::Class* klass,
                                                     Thread* self,
                                                     bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInstantiable())) {
    self->ThrowNewException("Ljava/lang/InstantiationError;", klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(klass->IsClassClass())) {
    ThrowIllegalAccessError(nullptr, "Class %s is inaccessible",
                            klass->PrettyDescriptor().c_str());
    *slow_path = true;
    return nullptr;  // Failure
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}

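// Same as CheckObjectAlloc, but for a class that is already known to be instantiable and not
// java.lang.Class; only the initialization check is performed.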
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path)
    REQUIRES_SHARED(Locks::mutator_lock_)
    REQUIRES(!Roles::uninterruptible_) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Allocate an instance of klass. Throws InstantiationError if klass is not instantiable,
// or IllegalAccessError if klass is j.l.Class. Performs a clinit check too.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(mirror::Class* klass,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  klass = CheckObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc can cause thread suspension which means we may now be instrumented.
    return klass->Alloc</*kInstrumented*/true>(
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator()).Ptr();
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type).Ptr();
}

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    // CheckClassInitializedForObjectAlloc can cause thread suspension which means we may now be
    // instrumented.
    return klass->Alloc</*kInstrumented*/true, false>(self, heap->GetCurrentAllocator()).Ptr();
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type).Ptr();
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type).Ptr();
}

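// Checks an array allocation from compiled code: throws NegativeArraySizeException for a
// negative count, resolves the array class if it is not yet in the dex cache, and optionally
// performs an access check. Sets *slow_path when the caller must null-check the result.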
template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(dex::TypeIndex type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  PointerSize pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << klass->PrettyClass();
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(dex::TypeIndex type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc can cause thread suspension which means we may now be instrumented.
    return mirror::Array::Alloc</*kInstrumented*/true>(self,
                                                       klass,
                                                       component_count,
                                                       klass->GetComponentSizeShift(),
                                                       heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

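// Same as AllocArrayFromCode, but for an array class that has already been resolved.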
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 ArtMethod* method,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

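// Slow path field resolution: resolves the field, performs the optional access, static-ness and
// size checks, and ensures the declaring class is initialized for static accesses. Returns null
// and leaves an exception pending on failure.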
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx,
                                   ArtMethod* referrer,
                                   Thread* self,
                                   size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite:  // Keep GCC happy by having a default handler, fall-through.
    default: is_primitive = true; is_set = true; is_static = true; break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  ArtField* resolved_field;
  if (access_check) {
    // Slow path: According to JLS 13.4.8, a linkage error may occur if a compile-time
    // qualifying type of a field and the resolved run-time qualifying type of a field differed
    // in their static-ness.
    //
    // In particular, don't assume the dex instruction already correctly knows if the
    // real field is static or not. The resolution must not be aware of this.
    ArtMethod* method = referrer->GetInterfaceMethodIfProxy(kRuntimePointerSize);

    StackHandleScope<2> hs(self);
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(method->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(method->GetClassLoader()));

    resolved_field = class_linker->ResolveFieldJLS(*method->GetDexFile(),
                                                   field_idx,
                                                   h_dex_cache,
                                                   h_class_loader);
  } else {
    // Fast path: Verifier already would've called ResolveFieldJLS and we wouldn't
    // be executing here if there was a static/non-static mismatch.
    resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  }

  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class,
                                                            resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 resolved_field->PrettyField(true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields are accessed through an object, whose class must already be initialized.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      if (LIKELY(class_linker->EnsureInitialized(self, hs.NewHandle(fields_class), true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

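// Slow path method resolution for an invoke of the given type: resolves the method, performs the
// optional access and incompatible-class-change checks, then selects the actual target via the
// vtable, super-class, interface or IMT lookup. Returns null and leaves an exception pending on
// failure.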
template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                     ObjPtr<mirror::Object>* this_object,
                                     ArtMethod* referrer,
                                     Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, referrer);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    ObjPtr<mirror::Object> null_this = nullptr;
    HandleWrapperObjPtr<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    constexpr ClassLinker::ResolveMode resolve_mode =
        access_check ? ClassLinker::kForceICCECheck
                     : ClassLinker::kNoICCECheckForCache;
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  }
  // Resolution and access check.
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (access_check) {
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    bool can_access_resolved_method =
        referrer->GetDeclaringClass()->CheckResolvedMethodAccess<type>(methods_class,
                                                                       resolved_method,
                                                                       method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        referrer);
      return nullptr;  // Failure.
    }
  }
  // Next, null pointer check.
  if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    if (UNLIKELY(resolved_method->GetDeclaringClass()->IsStringClass() &&
                 resolved_method->IsConstructor())) {
      // Hack for String init:
      //
      // We assume that the input of String.<init> in verified code is always
      // an uninitialized reference. If it is a null constant, it must have been
      // optimized out by the compiler. Do not throw NullPointerException.
    } else {
      // Maintain interpreter-like semantics where NullPointerException is thrown
      // after potential NoSuchMethodError from class linker.
      ThrowNullPointerExceptionForMethodAccess(method_idx, type);
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << klass->PrettyClass();
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      // TODO This lookup is quite slow.
      // NB This is actually quite tricky to do any other way. We cannot use GetDeclaringClass
      // since that will actually not be what we want in some cases where there are miranda
      // methods or defaults. What we actually need is a GetContainingClass that says which
      // classes virtuals this method is coming from.
      StackHandleScope<2> hs2(self);
      HandleWrapperObjPtr<mirror::Object> h_this(hs2.NewHandleWrapper(this_object));
      Handle<mirror::Class> h_referring_class(hs2.NewHandle(referrer->GetDeclaringClass()));
      const dex::TypeIndex method_type_idx =
          referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
      mirror::Class* method_reference_class = class_linker->ResolveType(method_type_idx, referrer);
      if (UNLIKELY(method_reference_class == nullptr)) {
        // Bad type idx.
        CHECK(self->IsExceptionPending());
        return nullptr;
      } else if (!method_reference_class->IsInterface()) {
        // It is not an interface. If the referring class is in the class hierarchy of the
        // referenced class in the bytecode, we use its super class. Otherwise, we throw
        // a NoSuchMethodError.
        mirror::Class* super_class = nullptr;
        if (method_reference_class->IsAssignableFrom(h_referring_class.Get())) {
          super_class = h_referring_class->GetSuperClass();
        }
        uint16_t vtable_index = resolved_method->GetMethodIndex();
        if (access_check) {
          // Check existence of super class.
          if (super_class == nullptr ||
              !super_class->HasVTable() ||
              vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
            // Behavior to agree with that of the verifier.
            ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                   resolved_method->GetName(), resolved_method->GetSignature());
            return nullptr;  // Failure.
          }
        }
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
      } else {
        // It is an interface.
        if (access_check) {
          if (!method_reference_class->IsAssignableFrom(h_this->GetClass())) {
            ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
                                                                    method_reference_class,
                                                                    h_this.Get(),
                                                                    referrer);
            return nullptr;  // Failure.
          }
        }
        // TODO We can do better than this for a (compiled) fastpath.
        ArtMethod* result = method_reference_class->FindVirtualMethodForInterfaceSuper(
            resolved_method, class_linker->GetImagePointerSize());
        // Throw a NoSuchMethodError if the lookup returned nullptr.
        if (result == nullptr) {
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
        }
        return result;
      }
      UNREACHABLE();
    }
    case kInterface: {
      uint32_t imt_index = ImTable::GetImtIndex(resolved_method);
      PointerSize pointer_size = class_linker->GetImagePointerSize();
      ArtMethod* imt_method = (*this_object)->GetClass()->GetImt(pointer_size)->
          Get(imt_index, pointer_size);
      if (!imt_method->IsRuntimeMethod()) {
        if (kIsDebugBuild) {
          mirror::Class* klass = (*this_object)->GetClass();
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << ArtMethod::PrettyMethod(resolved_method) << " / "
                                       << imt_method->PrettyMethod() << " / "
                                       << ArtMethod::PrettyMethod(method) << " / "
                                       << klass->PrettyClass();
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template REQUIRES_SHARED(Locks::mutator_lock_) ALWAYS_INLINE \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                      ObjPtr<mirror::Object>* this_object, \
                                                      ArtMethod* referrer, \
                                                      Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ArtField* resolved_field =
      referrer->GetDexCache()->GetResolvedField(field_idx, kRuntimePointerSize);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  ObjPtr<mirror::Class> fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check class is initialized else fail so that we can contend to initialize the class with
    // other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
inline ArtMethod* FindMethodFast(uint32_t method_idx,
                                 ObjPtr<mirror::Object> this_object,
                                 ArtMethod* referrer,
                                 bool access_check,
                                 InvokeType type) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  ArtMethod* resolved_method =
      referrer->GetDexCache()->GetResolvedMethod(method_idx, kRuntimePointerSize);
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return nullptr;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return nullptr;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method,
                                                                   kRuntimePointerSize);
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    // TODO This lookup is rather slow.
    dex::TypeIndex method_type_idx =
        referrer->GetDexFile()->GetMethodId(method_idx).class_idx_;
    mirror::Class* method_reference_class =
        referrer->GetDexCache()->GetResolvedType(method_type_idx);
    if (method_reference_class == nullptr) {
      // Need to do full type resolution...
      return nullptr;
    } else if (!method_reference_class->IsInterface()) {
      // It is not an interface. If the referring class is in the class hierarchy of the
      // referenced class in the bytecode, we use its super class. Otherwise, we cannot
      // resolve the method.
      if (!method_reference_class->IsAssignableFrom(referring_class)) {
        return nullptr;
      }
      mirror::Class* super_class = referring_class->GetSuperClass();
      if (resolved_method->GetMethodIndex() >= super_class->GetVTableLength()) {
        // The super class does not have the method.
        return nullptr;
      }
      return super_class->GetVTableEntry(resolved_method->GetMethodIndex(), kRuntimePointerSize);
    } else {
      return method_reference_class->FindVirtualMethodForInterfaceSuper(
          resolved_method, kRuntimePointerSize);
    }
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), kRuntimePointerSize);
  }
}

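// Resolves a type from compiled code, optionally checking access from the referrer and running
// the class initializer. Returns null with an exception pending on failure.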
inline mirror::Class* ResolveVerifyAndClinit(dex::TypeIndex type_idx,
                                             ArtMethod* referrer,
                                             Thread* self,
                                             bool can_run_clinit,
                                             bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

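// Resolves a string literal through the referrer's class linker.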
inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, dex::StringIndex string_idx) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

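// Releases the monitor taken for a synchronized JNI method, preserving any pending exception
// across the MonitorExit call.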
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException()->Dump();
  }
  // Restore pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}

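// Converts a floating-point value to an integral type with Java semantics: NaN maps to 0 and
// out-of-range values saturate to the integral type's min/max.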
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_