blob: 0315c3a953a59243c75a089919f9be2eaf402524 [file] [log] [blame]
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Mathieu Chartiere401d142015-04-22 13:56:20 -070017#ifndef ART_RUNTIME_ART_METHOD_H_
18#define ART_RUNTIME_ART_METHOD_H_
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080019
Vladimir Marko05792b92015-08-03 11:56:49 +010020#include "base/casts.h"
Jeff Hao790ad902013-05-22 15:02:08 -070021#include "dex_file.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070022#include "gc_root.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080023#include "invoke_type.h"
Mathieu Chartier36b58f52014-12-10 12:06:45 -080024#include "method_reference.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080025#include "modifiers.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070026#include "mirror/object.h"
Vladimir Marko7624d252014-05-02 14:40:15 +010027#include "quick/quick_method_frame_info.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070028#include "read_barrier_option.h"
Sebastien Hertze4b7c892014-12-17 20:02:50 +010029#include "stack.h"
Nicolas Geoffray39468442014-09-02 15:17:15 +010030#include "stack_map.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070031#include "utils.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080032
33namespace art {
34
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080035union JValue;
Nicolas Geoffray5550ca82015-08-21 18:38:30 +010036class ProfilingInfo;
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -070037class ScopedObjectAccessAlreadyRunnable;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080038class StringPiece;
Jeff Hao16743632013-05-08 10:59:04 -070039class ShadowFrame;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080040
41namespace mirror {
Mathieu Chartiere401d142015-04-22 13:56:20 -070042class Array;
43class Class;
44class PointerArray;
45} // namespace mirror
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080046
Mathieu Chartiere401d142015-04-22 13:56:20 -070047class ArtMethod FINAL {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080048 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -070049 ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
50 method_index_(0) { }
51
52 ArtMethod(const ArtMethod& src, size_t image_pointer_size) {
53 CopyFrom(&src, image_pointer_size);
54 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -070055
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -070056 static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
57 jobject jlr_method)
Mathieu Chartier90443472015-07-16 20:32:27 -070058 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers62f05122014-03-21 11:21:29 -070059
Mathieu Chartier90443472015-07-16 20:32:27 -070060 ALWAYS_INLINE mirror::Class* GetDeclaringClass() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080061
Mathieu Chartiere401d142015-04-22 13:56:20 -070062 ALWAYS_INLINE mirror::Class* GetDeclaringClassNoBarrier()
Mathieu Chartier90443472015-07-16 20:32:27 -070063 SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartiere401d142015-04-22 13:56:20 -070064
65 ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
Mathieu Chartier90443472015-07-16 20:32:27 -070066 SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartiere401d142015-04-22 13:56:20 -070067
68 void SetDeclaringClass(mirror::Class *new_declaring_class)
Mathieu Chartier90443472015-07-16 20:32:27 -070069 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080070
Mathieu Chartier10e5ea92015-08-13 12:56:31 -070071 bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
72 SHARED_REQUIRES(Locks::mutator_lock_);
73
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080074 static MemberOffset DeclaringClassOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -070075 return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080076 }
77
Andreas Gampecbc96b82015-09-30 20:05:24 +000078 // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
79 // a proxy method.
80 ALWAYS_INLINE uint32_t GetAccessFlags();
Jeff Hao5d917302013-02-27 17:57:33 -080081
Mathieu Chartiere401d142015-04-22 13:56:20 -070082 void SetAccessFlags(uint32_t new_access_flags) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +010083 // Not called within a transaction.
Mathieu Chartiere401d142015-04-22 13:56:20 -070084 access_flags_ = new_access_flags;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080085 }
86
87 // Approximate what kind of method call would be used for this method.
Mathieu Chartier90443472015-07-16 20:32:27 -070088 InvokeType GetInvokeType() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080089
90 // Returns true if the method is declared public.
Andreas Gampecbc96b82015-09-30 20:05:24 +000091 bool IsPublic() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080092 return (GetAccessFlags() & kAccPublic) != 0;
93 }
94
95 // Returns true if the method is declared private.
Andreas Gampecbc96b82015-09-30 20:05:24 +000096 bool IsPrivate() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080097 return (GetAccessFlags() & kAccPrivate) != 0;
98 }
99
100 // Returns true if the method is declared static.
Andreas Gampecbc96b82015-09-30 20:05:24 +0000101 bool IsStatic() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800102 return (GetAccessFlags() & kAccStatic) != 0;
103 }
104
105 // Returns true if the method is a constructor.
Andreas Gampecbc96b82015-09-30 20:05:24 +0000106 bool IsConstructor() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800107 return (GetAccessFlags() & kAccConstructor) != 0;
108 }
109
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700110 // Returns true if the method is a class initializer.
Andreas Gampecbc96b82015-09-30 20:05:24 +0000111 bool IsClassInitializer() {
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700112 return IsConstructor() && IsStatic();
113 }
114
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800115 // Returns true if the method is static, private, or a constructor.
Andreas Gampecbc96b82015-09-30 20:05:24 +0000116 bool IsDirect() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800117 return IsDirect(GetAccessFlags());
118 }
119
120 static bool IsDirect(uint32_t access_flags) {
Andreas Gampecbc96b82015-09-30 20:05:24 +0000121 constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
122 return (access_flags & direct) != 0;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800123 }
124
125 // Returns true if the method is declared synchronized.
Andreas Gampecbc96b82015-09-30 20:05:24 +0000126 bool IsSynchronized() {
127 constexpr uint32_t synchonized = kAccSynchronized | kAccDeclaredSynchronized;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800128 return (GetAccessFlags() & synchonized) != 0;
129 }
130
Andreas Gampecbc96b82015-09-30 20:05:24 +0000131 bool IsFinal() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800132 return (GetAccessFlags() & kAccFinal) != 0;
133 }
134
Andreas Gampecbc96b82015-09-30 20:05:24 +0000135 bool IsMiranda() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800136 return (GetAccessFlags() & kAccMiranda) != 0;
137 }
138
Andreas Gampecbc96b82015-09-30 20:05:24 +0000139 bool IsNative() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800140 return (GetAccessFlags() & kAccNative) != 0;
141 }
142
Andreas Gampecbc96b82015-09-30 20:05:24 +0000143 bool IsFastNative() {
144 constexpr uint32_t mask = kAccFastNative | kAccNative;
Ian Rogers16ce0922014-01-10 14:59:36 -0800145 return (GetAccessFlags() & mask) == mask;
Ian Rogers1eb512d2013-10-18 15:42:20 -0700146 }
147
Andreas Gampecbc96b82015-09-30 20:05:24 +0000148 bool IsAbstract() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800149 return (GetAccessFlags() & kAccAbstract) != 0;
150 }
151
Andreas Gampecbc96b82015-09-30 20:05:24 +0000152 bool IsSynthetic() {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800153 return (GetAccessFlags() & kAccSynthetic) != 0;
154 }
155
Mathieu Chartier90443472015-07-16 20:32:27 -0700156 bool IsProxyMethod() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800157
Andreas Gampecbc96b82015-09-30 20:05:24 +0000158 bool IsPreverified() {
Sebastien Hertz233ea8e2013-06-06 11:57:09 +0200159 return (GetAccessFlags() & kAccPreverified) != 0;
160 }
161
Andreas Gampecbc96b82015-09-30 20:05:24 +0000162 void SetPreverified() {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800163 DCHECK(!IsPreverified());
Sebastien Hertz233ea8e2013-06-06 11:57:09 +0200164 SetAccessFlags(GetAccessFlags() | kAccPreverified);
165 }
166
Mathieu Chartier90443472015-07-16 20:32:27 -0700167 bool IsOptimized(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_) {
Nicolas Geoffray39468442014-09-02 15:17:15 +0100168 // Temporary solution for detecting if a method has been optimized: the compiler
169 // does not create a GC map. Instead, the vmap table contains the stack map
170 // (as in stack_map.h).
Nicolas Geoffray376b2bb2014-12-09 14:26:32 +0000171 return !IsNative()
172 && GetEntryPointFromQuickCompiledCodePtrSize(pointer_size) != nullptr
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800173 && GetQuickOatCodePointer(pointer_size) != nullptr
Mathieu Chartier957ca1c2014-11-21 16:51:29 -0800174 && GetNativeGcMap(pointer_size) == nullptr;
Nicolas Geoffray39468442014-09-02 15:17:15 +0100175 }
176
Mathieu Chartier90443472015-07-16 20:32:27 -0700177 bool CheckIncompatibleClassChange(InvokeType type) SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800178
Mathieu Chartier90443472015-07-16 20:32:27 -0700179 uint16_t GetMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800180
Mathieu Chartier9f3629d2014-10-28 18:23:02 -0700181 // Doesn't do erroneous / unresolved class checks.
Mathieu Chartier90443472015-07-16 20:32:27 -0700182 uint16_t GetMethodIndexDuringLinking() SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartier9f3629d2014-10-28 18:23:02 -0700183
Mathieu Chartier90443472015-07-16 20:32:27 -0700184 size_t GetVtableIndex() SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800185 return GetMethodIndex();
186 }
187
Mathieu Chartier90443472015-07-16 20:32:27 -0700188 void SetMethodIndex(uint16_t new_method_index) SHARED_REQUIRES(Locks::mutator_lock_) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100189 // Not called within a transaction.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700190 method_index_ = new_method_index;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800191 }
192
Vladimir Markoc1363122015-04-09 14:13:13 +0100193 static MemberOffset DexMethodIndexOffset() {
194 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_);
195 }
196
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800197 static MemberOffset MethodIndexOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700198 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800199 }
200
Mathieu Chartiere401d142015-04-22 13:56:20 -0700201 uint32_t GetCodeItemOffset() {
202 return dex_code_item_offset_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800203 }
204
Mathieu Chartiere401d142015-04-22 13:56:20 -0700205 void SetCodeItemOffset(uint32_t new_code_off) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100206 // Not called within a transaction.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700207 dex_code_item_offset_ = new_code_off;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800208 }
209
210 // Number of 32bit registers that would be required to hold all the arguments
211 static size_t NumArgRegisters(const StringPiece& shorty);
212
Mathieu Chartier90443472015-07-16 20:32:27 -0700213 ALWAYS_INLINE uint32_t GetDexMethodIndex() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800214
Mathieu Chartiere401d142015-04-22 13:56:20 -0700215 void SetDexMethodIndex(uint32_t new_idx) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100216 // Not called within a transaction.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700217 dex_method_index_ = new_idx;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800218 }
219
Vladimir Marko05792b92015-08-03 11:56:49 +0100220 ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700221 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100222 ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index, size_t ptr_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700223 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100224 ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
225 ArtMethod* new_method,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700226 size_t ptr_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700227 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100228 ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods, size_t ptr_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700229 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100230 bool HasDexCacheResolvedMethods(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
231 bool HasSameDexCacheResolvedMethods(ArtMethod* other, size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700232 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100233 bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700234 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800235
Andreas Gampe58a5af82014-07-31 16:23:49 -0700236 template <bool kWithCheck = true>
Vladimir Marko05792b92015-08-03 11:56:49 +0100237 mirror::Class* GetDexCacheResolvedType(uint32_t type_idx, size_t ptr_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700238 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100239 void SetDexCacheResolvedTypes(GcRoot<mirror::Class>* new_dex_cache_types, size_t ptr_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700240 SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko05792b92015-08-03 11:56:49 +0100241 bool HasDexCacheResolvedTypes(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
242 bool HasSameDexCacheResolvedTypes(ArtMethod* other, size_t pointer_size)
243 SHARED_REQUIRES(Locks::mutator_lock_);
244 bool HasSameDexCacheResolvedTypes(GcRoot<mirror::Class>* other_cache, size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700245 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800246
Ian Rogersa0485602014-12-02 15:48:04 -0800247 // Get the Class* from the type index into this method's dex cache.
Vladimir Marko05792b92015-08-03 11:56:49 +0100248 mirror::Class* GetClassFromTypeIndex(uint16_t type_idx, bool resolve, size_t ptr_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700249 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogersa0485602014-12-02 15:48:04 -0800250
Ian Rogerse0a02da2014-12-02 14:10:53 -0800251 // Find the method that this method overrides.
Mathieu Chartier90443472015-07-16 20:32:27 -0700252 ArtMethod* FindOverriddenMethod(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800253
Ian Rogerse0a02da2014-12-02 14:10:53 -0800254 // Find the method index for this method within other_dexfile. If this method isn't present then
255 // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
256 // name and signature in the other_dexfile, such as the method index used to resolve this method
257 // in the other_dexfile.
258 uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
259 uint32_t name_and_signature_idx)
Mathieu Chartier90443472015-07-16 20:32:27 -0700260 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogerse0a02da2014-12-02 14:10:53 -0800261
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700262 void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
Mathieu Chartier90443472015-07-16 20:32:27 -0700263 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800264
Mathieu Chartiere401d142015-04-22 13:56:20 -0700265 const void* GetEntryPointFromQuickCompiledCode() {
Mathieu Chartier2d721012014-11-10 11:08:06 -0800266 return GetEntryPointFromQuickCompiledCodePtrSize(sizeof(void*));
267 }
Mathieu Chartiere401d142015-04-22 13:56:20 -0700268 ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(size_t pointer_size) {
Vladimir Marko05792b92015-08-03 11:56:49 +0100269 return GetNativePointer<const void*>(
Mathieu Chartier2d721012014-11-10 11:08:06 -0800270 EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
Ian Rogersef7d42f2014-01-06 12:55:46 -0800271 }
272
Mathieu Chartiere401d142015-04-22 13:56:20 -0700273 void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
Mathieu Chartier2d721012014-11-10 11:08:06 -0800274 SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
275 sizeof(void*));
276 }
Mathieu Chartier2d721012014-11-10 11:08:06 -0800277 ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
Mathieu Chartiere401d142015-04-22 13:56:20 -0700278 const void* entry_point_from_quick_compiled_code, size_t pointer_size) {
Vladimir Marko05792b92015-08-03 11:56:49 +0100279 SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
280 entry_point_from_quick_compiled_code, pointer_size);
Ian Rogersef7d42f2014-01-06 12:55:46 -0800281 }
282
Mathieu Chartier90443472015-07-16 20:32:27 -0700283 uint32_t GetCodeSize() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogersef7d42f2014-01-06 12:55:46 -0800284
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700285 // Check whether the given PC is within the quick compiled code associated with this method's
286 // quick entrypoint. This code isn't robust for instrumentation, etc. and is only used for
287 // debug purposes.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700288 bool PcIsWithinQuickCode(uintptr_t pc) {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800289 return PcIsWithinQuickCode(
290 reinterpret_cast<uintptr_t>(GetEntryPointFromQuickCompiledCode()), pc);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800291 }
292
Mathieu Chartier90443472015-07-16 20:32:27 -0700293 void AssertPcIsWithinQuickCode(uintptr_t pc) SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800294
Hiroshi Yamauchi9bdec882014-08-15 17:11:12 -0700295 // Returns true if the entrypoint points to the interpreter, as
296 // opposed to the compiled code, that is, this method will be
297 // interpretered on invocation.
Mathieu Chartier90443472015-07-16 20:32:27 -0700298 bool IsEntrypointInterpreter() SHARED_REQUIRES(Locks::mutator_lock_);
Hiroshi Yamauchi9bdec882014-08-15 17:11:12 -0700299
Mathieu Chartiere401d142015-04-22 13:56:20 -0700300 uint32_t GetQuickOatCodeOffset();
301 void SetQuickOatCodeOffset(uint32_t code_offset);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800302
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700303 ALWAYS_INLINE static const void* EntryPointToCodePointer(const void* entry_point) {
Vladimir Marko8a630572014-04-09 18:45:35 +0100304 uintptr_t code = reinterpret_cast<uintptr_t>(entry_point);
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700305 // TODO: Make this Thumb2 specific. It is benign on other architectures as code is always at
306 // least 2 byte aligned.
307 code &= ~0x1;
Vladimir Marko8a630572014-04-09 18:45:35 +0100308 return reinterpret_cast<const void*>(code);
309 }
310
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700311 // Actual entry point pointer to compiled oat code or null.
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800312 const void* GetQuickOatEntryPoint(size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700313 SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700314 // Actual pointer to compiled oat code or null.
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800315 const void* GetQuickOatCodePointer(size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700316 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800317 return EntryPointToCodePointer(GetQuickOatEntryPoint(pointer_size));
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700318 }
Vladimir Marko8a630572014-04-09 18:45:35 +0100319
Ian Rogers1809a722013-08-09 22:05:32 -0700320 // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800321 const uint8_t* GetMappingTable(size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700322 SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800323 const uint8_t* GetMappingTable(const void* code_pointer, size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700324 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800325
Ian Rogers1809a722013-08-09 22:05:32 -0700326 // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800327 const uint8_t* GetVmapTable(size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700328 SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800329 const uint8_t* GetVmapTable(const void* code_pointer, size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700330 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800331
Mathieu Chartier90443472015-07-16 20:32:27 -0700332 const uint8_t* GetQuickenedInfo() SHARED_REQUIRES(Locks::mutator_lock_);
Nicolas Geoffray9523a3e2015-07-17 11:51:28 +0000333
Mathieu Chartier90443472015-07-16 20:32:27 -0700334 CodeInfo GetOptimizedCodeInfo() SHARED_REQUIRES(Locks::mutator_lock_);
Nicolas Geoffray39468442014-09-02 15:17:15 +0100335
Mathieu Chartier957ca1c2014-11-21 16:51:29 -0800336 // Callers should wrap the uint8_t* in a GcMap instance for convenient access.
337 const uint8_t* GetNativeGcMap(size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700338 SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartier957ca1c2014-11-21 16:51:29 -0800339 const uint8_t* GetNativeGcMap(const void* code_pointer, size_t pointer_size)
Mathieu Chartier90443472015-07-16 20:32:27 -0700340 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800341
Andreas Gampe90546832014-03-12 18:07:19 -0700342 template <bool kCheckFrameSize = true>
Mathieu Chartier90443472015-07-16 20:32:27 -0700343 uint32_t GetFrameSizeInBytes() SHARED_REQUIRES(Locks::mutator_lock_) {
Vladimir Marko7624d252014-05-02 14:40:15 +0100344 uint32_t result = GetQuickFrameInfo().FrameSizeInBytes();
Andreas Gampe90546832014-03-12 18:07:19 -0700345 if (kCheckFrameSize) {
346 DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
347 }
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800348 return result;
349 }
350
Mathieu Chartier90443472015-07-16 20:32:27 -0700351 QuickMethodFrameInfo GetQuickFrameInfo() SHARED_REQUIRES(Locks::mutator_lock_);
Vladimir Marko4c1c5102014-05-14 16:51:16 +0100352 QuickMethodFrameInfo GetQuickFrameInfo(const void* code_pointer)
Mathieu Chartier90443472015-07-16 20:32:27 -0700353 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800354
Mathieu Chartier90443472015-07-16 20:32:27 -0700355 FrameOffset GetReturnPcOffset() SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700356 return GetReturnPcOffset(GetFrameSizeInBytes());
Vladimir Marko4c1c5102014-05-14 16:51:16 +0100357 }
358
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700359 FrameOffset GetReturnPcOffset(uint32_t frame_size_in_bytes)
Mathieu Chartier90443472015-07-16 20:32:27 -0700360 SHARED_REQUIRES(Locks::mutator_lock_) {
Vladimir Marko4c1c5102014-05-14 16:51:16 +0100361 DCHECK_EQ(frame_size_in_bytes, GetFrameSizeInBytes());
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700362 return FrameOffset(frame_size_in_bytes - sizeof(void*));
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800363 }
364
Mathieu Chartier90443472015-07-16 20:32:27 -0700365 FrameOffset GetHandleScopeOffset() SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700366 constexpr size_t handle_scope_offset = sizeof(ArtMethod*);
Sebastien Hertze4b7c892014-12-17 20:02:50 +0100367 DCHECK_LT(handle_scope_offset, GetFrameSizeInBytes());
368 return FrameOffset(handle_scope_offset);
Ian Rogers62d6c772013-02-27 08:32:07 -0800369 }
370
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700371 void RegisterNative(const void* native_method, bool is_fast)
Mathieu Chartier90443472015-07-16 20:32:27 -0700372 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800373
Mathieu Chartier90443472015-07-16 20:32:27 -0700374 void UnregisterNative() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800375
Vladimir Marko05792b92015-08-03 11:56:49 +0100376 static MemberOffset DexCacheResolvedMethodsOffset(size_t pointer_size) {
377 return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
378 PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*) * pointer_size);
379 }
380
381 static MemberOffset DexCacheResolvedTypesOffset(size_t pointer_size) {
382 return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
383 PtrSizedFields, dex_cache_resolved_types_) / sizeof(void*) * pointer_size);
384 }
385
Mathieu Chartier2d721012014-11-10 11:08:06 -0800386 static MemberOffset EntryPointFromJniOffset(size_t pointer_size) {
Mathieu Chartiereace4582014-11-24 18:29:54 -0800387 return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
Mathieu Chartier2d721012014-11-10 11:08:06 -0800388 PtrSizedFields, entry_point_from_jni_) / sizeof(void*) * pointer_size);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800389 }
390
Mathieu Chartier2d721012014-11-10 11:08:06 -0800391 static MemberOffset EntryPointFromQuickCompiledCodeOffset(size_t pointer_size) {
Mathieu Chartiereace4582014-11-24 18:29:54 -0800392 return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
Mathieu Chartier2d721012014-11-10 11:08:06 -0800393 PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*) * pointer_size);
394 }
395
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100396 ProfilingInfo* CreateProfilingInfo() SHARED_REQUIRES(Locks::mutator_lock_);
397
Mathieu Chartier1147b9b2015-09-14 18:50:08 -0700398 ProfilingInfo* GetProfilingInfo(size_t pointer_size) {
399 return reinterpret_cast<ProfilingInfo*>(GetEntryPointFromJniPtrSize(pointer_size));
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100400 }
401
Mathieu Chartiere401d142015-04-22 13:56:20 -0700402 void* GetEntryPointFromJni() {
Mathieu Chartier2d721012014-11-10 11:08:06 -0800403 return GetEntryPointFromJniPtrSize(sizeof(void*));
404 }
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100405
Mathieu Chartiere401d142015-04-22 13:56:20 -0700406 ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(size_t pointer_size) {
Vladimir Marko05792b92015-08-03 11:56:49 +0100407 return GetNativePointer<void*>(EntryPointFromJniOffset(pointer_size), pointer_size);
Mathieu Chartier2d721012014-11-10 11:08:06 -0800408 }
409
Andreas Gampecbc96b82015-09-30 20:05:24 +0000410 void SetEntryPointFromJni(const void* entrypoint) {
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100411 DCHECK(IsNative());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700412 SetEntryPointFromJniPtrSize(entrypoint, sizeof(void*));
Mathieu Chartier2d721012014-11-10 11:08:06 -0800413 }
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100414
Mathieu Chartiere401d142015-04-22 13:56:20 -0700415 ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, size_t pointer_size) {
Vladimir Marko05792b92015-08-03 11:56:49 +0100416 SetNativePointer(EntryPointFromJniOffset(pointer_size), entrypoint, pointer_size);
Mathieu Chartier2d721012014-11-10 11:08:06 -0800417 }
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800418
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800419 // Is this a CalleSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
420 // conventions for a method of managed code. Returns false for Proxy methods.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700421 ALWAYS_INLINE bool IsRuntimeMethod();
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800422
423 // Is this a hand crafted method used for something like describing callee saves?
Mathieu Chartier90443472015-07-16 20:32:27 -0700424 bool IsCalleeSaveMethod() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800425
Mathieu Chartier90443472015-07-16 20:32:27 -0700426 bool IsResolutionMethod() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800427
Mathieu Chartier90443472015-07-16 20:32:27 -0700428 bool IsImtConflictMethod() SHARED_REQUIRES(Locks::mutator_lock_);
Jeff Hao88474b42013-10-23 16:24:40 -0700429
Mathieu Chartier90443472015-07-16 20:32:27 -0700430 bool IsImtUnimplementedMethod() SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartier2d2621a2014-10-23 16:48:06 -0700431
Mathieu Chartier90443472015-07-16 20:32:27 -0700432 uintptr_t NativeQuickPcOffset(const uintptr_t pc) SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700433#ifdef NDEBUG
434 uintptr_t NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point)
Mathieu Chartier90443472015-07-16 20:32:27 -0700435 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700436 return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
437 }
438#else
439 uintptr_t NativeQuickPcOffset(const uintptr_t pc, const void* quick_entry_point)
Mathieu Chartier90443472015-07-16 20:32:27 -0700440 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700441#endif
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800442
443 // Converts a native PC to a dex PC.
Dave Allisonb373e092014-02-20 16:06:36 -0800444 uint32_t ToDexPc(const uintptr_t pc, bool abort_on_failure = true)
Mathieu Chartier90443472015-07-16 20:32:27 -0700445 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800446
447 // Converts a dex PC to a native PC.
David Brazdil72f7b882015-09-15 17:00:52 +0100448 uintptr_t ToNativeQuickPc(const uint32_t dex_pc,
David Brazdilf1fbd522015-09-15 17:57:31 +0100449 bool is_for_catch_handler,
David Brazdil72f7b882015-09-15 17:00:52 +0100450 bool abort_on_failure = true)
Mathieu Chartier90443472015-07-16 20:32:27 -0700451 SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800452
Mathieu Chartier90443472015-07-16 20:32:27 -0700453 MethodReference ToMethodReference() SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartier36b58f52014-12-10 12:06:45 -0800454 return MethodReference(GetDexFile(), GetDexMethodIndex());
455 }
456
  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates (via *has_no_move_exception) whether the found catch block is responsible for
  // clearing the exception or whether a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Visit the GC roots held by this method with the given visitor.
  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, size_t pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  // The dex file defining this method (reachable via declaring_class_->dex_cache_,
  // per the field comment below).
  const DexFile* GetDexFile() SHARED_REQUIRES(Locks::mutator_lock_);

  // Type descriptor string of the declaring class (e.g. "Ljava/lang/Object;").
  const char* GetDeclaringClassDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700471
Mathieu Chartier90443472015-07-16 20:32:27 -0700472 const char* GetShorty() SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700473 uint32_t unused_length;
474 return GetShorty(&unused_length);
475 }
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700476
  // Returns the shorty descriptor and stores its length in *out_length.
  const char* GetShorty(uint32_t* out_length) SHARED_REQUIRES(Locks::mutator_lock_);

  // Full dex signature of this method.
  const Signature GetSignature() SHARED_REQUIRES(Locks::mutator_lock_);

  // Method name as a C string from the dex file.
  ALWAYS_INLINE const char* GetName() SHARED_REQUIRES(Locks::mutator_lock_);

  // Method name as a managed java.lang.String (may allocate).
  mirror::String* GetNameAsString(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);

  // CodeItem (bytecode and metadata) for this method, if any.
  const DexFile::CodeItem* GetCodeItem() SHARED_REQUIRES(Locks::mutator_lock_);

  // Whether the given type index is already resolved in the dex cache.
  bool IsResolvedTypeIdx(uint16_t type_idx, size_t ptr_size) SHARED_REQUIRES(Locks::mutator_lock_);

  // Source line number for the given dex PC, from debug info.
  int32_t GetLineNumFromDexPC(uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_);

  // Prototype (return and parameter types) entry in the dex file.
  const DexFile::ProtoId& GetPrototype() SHARED_REQUIRES(Locks::mutator_lock_);

  // Parameter type list, or null for a method with no parameters.
  const DexFile::TypeList* GetParameterTypeList() SHARED_REQUIRES(Locks::mutator_lock_);

  // Source file name of the declaring class.
  const char* GetDeclaringClassSourceFile() SHARED_REQUIRES(Locks::mutator_lock_);

  // Index of the declaring class' ClassDef in its dex file.
  uint16_t GetClassDefIndex() SHARED_REQUIRES(Locks::mutator_lock_);

  // ClassDef of the declaring class.
  const DexFile::ClassDef& GetClassDef() SHARED_REQUIRES(Locks::mutator_lock_);

  // Type descriptor of the return type.
  const char* GetReturnTypeDescriptor() SHARED_REQUIRES(Locks::mutator_lock_);

  // Type descriptor for an arbitrary type index in this method's dex file.
  const char* GetTypeDescriptorFromTypeIdx(uint16_t type_idx)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // May cause thread suspension due to GetClassFromTypeIdx calling ResolveType;
  // this has caused a large number of bugs at call sites.
  mirror::Class* GetReturnType(bool resolve, size_t ptr_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Class loader of the declaring class.
  mirror::ClassLoader* GetClassLoader() SHARED_REQUIRES(Locks::mutator_lock_);

  // Dex cache of the declaring class.
  mirror::DexCache* GetDexCache() SHARED_REQUIRES(Locks::mutator_lock_);

  // For a proxy method, returns the interface method it implements; otherwise
  // presumably returns this method itself — confirm in the implementation.
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Compares this method's parameter types against the given class array.
  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700521
Vladimir Marko14632852015-08-17 12:07:23 +0100522 // Size of an instance of this native class.
523 static size_t Size(size_t pointer_size) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700524 return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size) +
Mathieu Chartiereace4582014-11-24 18:29:54 -0800525 (sizeof(PtrSizedFields) / sizeof(void*)) * pointer_size;
Mathieu Chartier2d721012014-11-10 11:08:06 -0800526 }
527
  // Alignment of an instance of this native class.
  static size_t Alignment(size_t pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return pointer_size;
  }
534
  // Copies the contents of src into this method, honoring the image pointer size
  // (needed because the object layout differs between 32- and 64-bit images).
  void CopyFrom(const ArtMethod* src, size_t image_pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Pointer to the dex-cache resolved-types array (see
  // ptr_sized_fields_.dex_cache_resolved_types_ below).
  ALWAYS_INLINE GcRoot<mirror::Class>* GetDexCacheResolvedTypes(size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700540
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100541 uint16_t IncrementCounter() {
542 return ++hotness_count_;
543 }
544
 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable. Deliberately 16-bit to keep the object small.
  uint16_t method_index_;

  // The hotness we measure for this method. Incremented by the interpreter. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  // PACKED(4) is necessary for the correctness of
  // RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size).
  struct PACKED(4) PtrSizedFields {
    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    ArtMethod** dex_cache_resolved_methods_;

    // Short cuts to declaring_class_->dex_cache_ member for fast compiled code access.
    GcRoot<mirror::Class>* dex_cache_resolved_types_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function,
    // or the profiling data for non-native methods.
    void* entry_point_from_jni_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;
592
 private:
  // Byte offset of ptr_sized_fields_ for the given image pointer size.
  static size_t PtrSizedFieldsOffset(size_t pointer_size) {
    // Round up to pointer size for padding field.
    return RoundUp(OFFSETOF_MEMBER(ArtMethod, ptr_sized_fields_), pointer_size);
  }
598
599 template<typename T>
Vladimir Marko05792b92015-08-03 11:56:49 +0100600 ALWAYS_INLINE T GetNativePointer(MemberOffset offset, size_t pointer_size) const {
601 static_assert(std::is_pointer<T>::value, "T must be a pointer type");
Mathieu Chartiere401d142015-04-22 13:56:20 -0700602 DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
603 const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
604 if (pointer_size == sizeof(uint32_t)) {
605 return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
606 } else {
607 auto v = *reinterpret_cast<const uint64_t*>(addr);
Vladimir Marko05792b92015-08-03 11:56:49 +0100608 return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
Mathieu Chartiere401d142015-04-22 13:56:20 -0700609 }
610 }
611
612 template<typename T>
Vladimir Marko05792b92015-08-03 11:56:49 +0100613 ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, size_t pointer_size) {
614 static_assert(std::is_pointer<T>::value, "T must be a pointer type");
Mathieu Chartiere401d142015-04-22 13:56:20 -0700615 DCHECK(ValidPointerSize(pointer_size)) << pointer_size;
616 const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
617 if (pointer_size == sizeof(uint32_t)) {
618 uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
Vladimir Marko05792b92015-08-03 11:56:49 +0100619 *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700620 } else {
621 *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
622 }
Mathieu Chartier2d721012014-11-10 11:08:06 -0800623 }
624
  // Code points to the start of the quick code. Returns its size
  // (NOTE(review): implementation is out of view — confirm how the size is derived).
  static uint32_t GetCodeSize(const void* code);
627
628 static bool PcIsWithinQuickCode(uintptr_t code, uintptr_t pc) {
629 if (code == 0) {
630 return pc == 0;
631 }
632 /*
633 * During a stack walk, a return PC may point past-the-end of the code
634 * in the case that the last instruction is a call that isn't expected to
635 * return. Thus, we check <= code + GetCodeSize().
636 *
637 * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
638 */
639 return code <= pc && pc <= code + GetCodeSize(
640 EntryPointToCodePointer(reinterpret_cast<const void*>(code)));
641 }
642
Mathieu Chartiere401d142015-04-22 13:56:20 -0700643 DISALLOW_COPY_AND_ASSIGN(ArtMethod); // Need to use CopyFrom to deal with 32 vs 64 bits.
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800644};
645
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800646} // namespace art
647
Mathieu Chartiere401d142015-04-22 13:56:20 -0700648#endif // ART_RUNTIME_ART_METHOD_H_