/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "art_method.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "thread.h"

namespace art {

inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    uint32_t method_index,
                                    InvokeType invoke_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ArtMethod* caller = outer_method->GetDexCacheResolvedMethod(method_index, sizeof(void*));
  if (!caller->IsRuntimeMethod()) {
    return caller;
  }

  // The method in the dex cache can be the runtime method responsible for invoking
  // the stub that will then update the dex cache. Therefore, we need to do the
  // resolution ourselves.

  StackHandleScope<2> hs(Thread::Current());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(outer_method->GetClassLoader()));
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(outer_method->GetDexCache()));
  return class_linker->ResolveMethod(
      *outer_method->GetDexFile(), method_index, dex_cache, class_loader, nullptr, invoke_type);
}

inline ArtMethod* GetCalleeSaveMethodCaller(ArtMethod** sp,
                                            Runtime::CalleeSaveType type,
                                            bool do_caller_check = false)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type));

  const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
  auto** caller_sp = reinterpret_cast<ArtMethod**>(
      reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
  ArtMethod* outer_method = *caller_sp;
  ArtMethod* caller = outer_method;

  if ((outer_method != nullptr) && outer_method->IsOptimized(sizeof(void*))) {
    const size_t callee_return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, type);
    uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
        (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
    uintptr_t native_pc_offset = outer_method->NativeQuickPcOffset(caller_pc);
    CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
    DCHECK(stack_map.IsValid());
    if (stack_map.HasInlineInfo(encoding)) {
      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
      uint32_t method_index = inline_info.GetMethodIndexAtDepth(inline_info.GetDepth() - 1);
      InvokeType invoke_type = static_cast<InvokeType>(
          inline_info.GetInvokeTypeAtDepth(inline_info.GetDepth() - 1));
      caller = GetResolvedMethod(outer_method, method_index, invoke_type);
    }
  }

  if (kIsDebugBuild && do_caller_check) {
    // Note that do_caller_check is optional, as this method can be called by
    // stubs and tests without a proper call stack.
    NthCallerVisitor visitor(Thread::Current(), 1, true);
    visitor.WalkStack();
    CHECK_EQ(caller, visitor.caller);
  }

  return caller;
}
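
// Illustrative sketch (not part of the runtime; the entrypoint name below is
// hypothetical): a quick entrypoint handed the top quick frame can recover the
// managed caller, including a caller that was inlined by the optimizing compiler.
//
//   extern "C" void artExampleEntrypoint(Thread* self, ArtMethod** sp)
//       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//     // |sp| points at the callee-save method stored by the stub; the helper walks
//     // past the callee-save frame and resolves inlined frames if necessary.
//     ArtMethod* caller = GetCalleeSaveMethodCaller(sp, Runtime::kRefsAndArgs);
//     // ... use |caller|, e.g. for dex cache lookups on behalf of the caller.
//   }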

inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return GetCalleeSaveMethodCaller(
      self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */);
}

template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      self->ThrowNewException("Ljava/lang/InstantiationError;", PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (running the class initializer) might cause a GC and may
    // suspend this thread, meaning that another thread may change the allocator
    // while we are stuck in the entrypoints of an old allocator. Also, the class
    // initialization may fail. To handle these cases we mark the slow-path boolean
    // as true so that the caller knows to re-check the allocator type, and to
    // null-check the return value in case the initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}

ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // As above: EnsureInitialized (running the class initializer) might cause a GC
    // and may suspend this thread, meaning that another thread may change the
    // allocator while we are stuck in the entrypoints of an old allocator. Also,
    // the class initialization may fail. To handle these cases we mark the
    // slow-path boolean as true so that the caller knows to re-check the allocator
    // type, and to null-check the return value in case the initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When the verifier/compiler has not been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                           ArtMethod* method,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}
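
// Illustrative sketch (hypothetical entrypoint, not part of this file): this is the
// shape in which allocation entrypoints typically drive AllocObjectFromCode. The
// template flags select compile-time variants: access checks for callers the
// verifier could not prove safe, and instrumentation for allocation tracking.
//
//   extern "C" mirror::Object* artAllocObjectExample(
//       uint32_t type_idx, ArtMethod* method, Thread* self)
//       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//     // Verified caller (no access check), instrumented RosAlloc variant.
//     return AllocObjectFromCode<false, true>(
//         type_idx, method, self, gc::kAllocatorTypeRosAlloc);
//   }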

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}


template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache, so try to resolve.
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier/compiler has not been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               klass->GetComponentSizeShift(),
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}
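
// Worked example of the size computation implied above (illustrative): for an
// int[] array the component size is 4 bytes, so GetComponentSizeShift() returns 2
// and mirror::Array::Alloc reserves roughly
//
//   data size = component_count << 2
//
// bytes for the elements, plus the array header (object header and length field).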

template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 ArtMethod* method,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx, ArtMethod* referrer,
                                   Thread* self, size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields are accessed through an object instance, so the declaring
    // class is necessarily initialized already.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
        // The class is now initialized, so the resolved field can be used.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}
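
// Illustrative sketch (hypothetical entrypoint, not part of this file): the
// static-field-get entrypoints typically instantiate FindFieldFromCode with the
// access type and field width they service, e.g. a 32-bit static primitive read:
//
//   extern "C" uint32_t artGet32StaticExample(uint32_t field_idx, ArtMethod* referrer,
//                                             Thread* self)
//       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//     ArtField* field = FindFieldFromCode<StaticPrimitiveRead, true>(
//         field_idx, referrer, self, sizeof(int32_t));
//     if (UNLIKELY(field == nullptr)) {
//       return 0;  // An exception is pending; the stub delivers it.
//     }
//     // For statics, the declaring class is the object holding the storage.
//     return field->Get32(field->GetDeclaringClass());
//   }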

// Explicit template instantiations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object,
                                     ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, *referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after a potential NoSuchMethodError from the class linker.
    ThrowNullPointerExceptionForMethodAccess(method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // An incompatible class change should have been handled in ResolveMethod.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check) {
        // Check the existence of the super class.
        if (super_class == nullptr || !super_class->HasVTable() ||
            vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // The super class must exist.
        DCHECK(super_class != nullptr);
      }
      DCHECK(super_class->HasVTable());
      return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(
          imt_index, class_linker->GetImagePointerSize());
      if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) {
        if (kIsDebugBuild) {
          mirror::Class* klass = (*this_object)->GetClass();
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
              PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
              PrettyClass(klass);
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}
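
// Illustrative sketch (hypothetical entrypoint, not part of this file): invoke
// stubs instantiate FindMethodFromCode per invoke type. |this_object| and
// |referrer| are passed by pointer because resolution may suspend the thread and
// a moving GC may relocate the referenced objects.
//
//   extern "C" ArtMethod* artFindVirtualExample(uint32_t method_idx,
//                                               mirror::Object** this_object,
//                                               ArtMethod** referrer, Thread* self)
//       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//     ArtMethod* called = FindMethodFromCode<kVirtual, true>(
//         method_idx, this_object, referrer, self);
//     // nullptr means an exception is pending and must be delivered.
//     return called;
//   }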

// Explicit template instantiations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                    mirror::Object** this_object, \
                                                    ArtMethod** referrer, \
                                                    Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*));
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for an incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check that the class is initialized, else fail so that the slow path can contend to
    // initialize the class with other threads that may be racing to do so.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}
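
// Illustrative sketch: FindFieldFast pairs with FindFieldFromCode in the usual
// fast-then-slow pattern, where nullptr from the fast path means "take the slow
// path", never "error":
//
//   ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead,
//                                   sizeof(int32_t));
//   if (LIKELY(field != nullptr)) {
//     // Fast path: already resolved, initialized, and access-checked.
//   } else {
//     // Slow path: may resolve, initialize classes, and throw.
//     field = FindFieldFromCode<StaticPrimitiveRead, true>(
//         field_idx, referrer, self, sizeof(int32_t));
//   }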

// Fast path method resolution that can't throw exceptions.
inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object,
                                 ArtMethod* referrer, bool access_check, InvokeType type) {
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx, sizeof(void*));
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return nullptr;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access; may need to refine the method's class.
      return nullptr;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method, sizeof(void*));
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    return referrer->GetDeclaringClass()->GetSuperClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), sizeof(void*));
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), sizeof(void*));
  }
}

inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, ArtMethod* referrer, Thread* self,
                                             bool can_run_clinit, bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - indicate to caller to deliver exception.
  }
  // Perform an access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - indicate to caller to deliver exception.
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - indicate to caller to deliver exception.
  }
  return h_class.Get();
}
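
// Illustrative call shapes (a sketch of how the type-initialization entrypoints
// typically use this helper; flag combinations shown are assumptions):
//
//   // const-class style resolution: never runs <clinit>.
//   mirror::Class* c1 = ResolveVerifyAndClinit(type_idx, referrer, self,
//                                              false /* can_run_clinit */,
//                                              false /* verify_access */);
//   // Static storage initialization before a static access: may run <clinit>.
//   mirror::Class* c2 = ResolveVerifyAndClinit(type_idx, referrer, self,
//                                              true /* can_run_clinit */,
//                                              false /* verify_access */);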

inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over the monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode the locked object and unlock it, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    // Guard against saved_exception being null: MonitorExit itself may have raised
    // the pending exception.
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
        << (saved_exception != nullptr ? saved_exception->Dump()
                                       : std::string("<no prior exception>"))
        << "\nEncountered second exception during implicit MonitorExit:\n"
        << self->GetException()->Dump();
  }
  // Restore the pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}
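
// Illustrative sketch (hypothetical epilogue, expressed as pseudo-C++): for a
// synchronized native method, the JNI exit sequence unlocks before the local
// reference frame is popped, preserving any exception the native code left pending:
//
//   // ... native method body returned, possibly with an exception pending ...
//   UnlockJniSynchronizedMethod(locked /* jobject of 'this' or the class */, self);
//   // ... pop the local reference frame and return to managed code ...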

template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN.
  }
}
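
// Worked examples of the saturating conversion above (matching the Java-language
// float-to-int semantics this helper implements):
//
//   art_float_to_integral<int32_t, float>(1.9f)   == 1          (truncates toward zero)
//   art_float_to_integral<int32_t, float>(1e10f)  == INT32_MAX  (saturates high)
//   art_float_to_integral<int32_t, float>(-1e10f) == INT32_MIN  (saturates low)
//   art_float_to_integral<int32_t, float>(NAN)    == 0          (NaN maps to zero)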

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_