blob: 5d4a6ea0d5acac2af84db9189d9c8e95e018af83 [file] [log] [blame]
Ian Rogers2dd0e2c2013-01-24 12:42:14 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromea46f952013-07-30 01:26:50 -070017#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
18#define ART_RUNTIME_MIRROR_ART_METHOD_H_
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080019
20#include "class.h"
Jeff Hao790ad902013-05-22 15:02:08 -070021#include "dex_file.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080022#include "invoke_type.h"
23#include "locks.h"
24#include "modifiers.h"
25#include "object.h"
26
27namespace art {
28
Brian Carlstromea46f952013-07-30 01:26:50 -070029struct ArtMethodOffsets;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080030struct ConstructorMethodOffsets;
31union JValue;
32struct MethodClassOffsets;
Jeff Hao790ad902013-05-22 15:02:08 -070033class MethodHelper;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080034class StringPiece;
Jeff Hao16743632013-05-08 10:59:04 -070035class ShadowFrame;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080036
37namespace mirror {
38
39class StaticStorageBase;
40
// Signature of the interpreter entry point stored in each ArtMethod
// (entry_point_from_interpreter_): called to execute the method's code item
// in the given shadow frame, writing the outcome into *result.
typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
    const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result);
Jeff Hao16743632013-05-08 10:59:04 -070043
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080044// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
Brian Carlstromea46f952013-07-30 01:26:50 -070045class MANAGED ArtMethod : public Object {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080046 public:
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080047 Class* GetDeclaringClass() const;
48
49 void SetDeclaringClass(Class *new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
50
51 static MemberOffset DeclaringClassOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -070052 return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080053 }
54
Jeff Haoaa4a7932013-05-13 11:28:27 -070055 static MemberOffset EntryPointFromCompiledCodeOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -070056 return MemberOffset(OFFSETOF_MEMBER(ArtMethod, entry_point_from_compiled_code_));
Jeff Hao5d917302013-02-27 17:57:33 -080057 }
58
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080059 uint32_t GetAccessFlags() const;
60
61 void SetAccessFlags(uint32_t new_access_flags) {
Brian Carlstromea46f952013-07-30 01:26:50 -070062 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_), new_access_flags, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080063 }
64
65 // Approximate what kind of method call would be used for this method.
66 InvokeType GetInvokeType() const;
67
68 // Returns true if the method is declared public.
69 bool IsPublic() const {
70 return (GetAccessFlags() & kAccPublic) != 0;
71 }
72
73 // Returns true if the method is declared private.
74 bool IsPrivate() const {
75 return (GetAccessFlags() & kAccPrivate) != 0;
76 }
77
78 // Returns true if the method is declared static.
79 bool IsStatic() const {
80 return (GetAccessFlags() & kAccStatic) != 0;
81 }
82
83 // Returns true if the method is a constructor.
84 bool IsConstructor() const {
85 return (GetAccessFlags() & kAccConstructor) != 0;
86 }
87
88 // Returns true if the method is static, private, or a constructor.
89 bool IsDirect() const {
90 return IsDirect(GetAccessFlags());
91 }
92
93 static bool IsDirect(uint32_t access_flags) {
94 return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
95 }
96
97 // Returns true if the method is declared synchronized.
98 bool IsSynchronized() const {
99 uint32_t synchonized = kAccSynchronized | kAccDeclaredSynchronized;
100 return (GetAccessFlags() & synchonized) != 0;
101 }
102
103 bool IsFinal() const {
104 return (GetAccessFlags() & kAccFinal) != 0;
105 }
106
107 bool IsMiranda() const {
108 return (GetAccessFlags() & kAccMiranda) != 0;
109 }
110
111 bool IsNative() const {
112 return (GetAccessFlags() & kAccNative) != 0;
113 }
114
115 bool IsAbstract() const {
116 return (GetAccessFlags() & kAccAbstract) != 0;
117 }
118
119 bool IsSynthetic() const {
120 return (GetAccessFlags() & kAccSynthetic) != 0;
121 }
122
123 bool IsProxyMethod() const;
124
Sebastien Hertz233ea8e2013-06-06 11:57:09 +0200125 bool IsPreverified() const {
126 return (GetAccessFlags() & kAccPreverified) != 0;
127 }
128
129 void SetPreverified() {
130 SetAccessFlags(GetAccessFlags() | kAccPreverified);
131 }
132
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800133 bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
134
135 uint16_t GetMethodIndex() const;
136
137 size_t GetVtableIndex() const {
138 return GetMethodIndex();
139 }
140
141 void SetMethodIndex(uint16_t new_method_index) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700142 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_), new_method_index, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800143 }
144
145 static MemberOffset MethodIndexOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700146 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800147 }
148
149 uint32_t GetCodeItemOffset() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700150 return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, code_item_offset_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800151 }
152
153 void SetCodeItemOffset(uint32_t new_code_off) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700154 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, code_item_offset_), new_code_off, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800155 }
156
157 // Number of 32bit registers that would be required to hold all the arguments
158 static size_t NumArgRegisters(const StringPiece& shorty);
159
160 uint32_t GetDexMethodIndex() const;
161
162 void SetDexMethodIndex(uint32_t new_idx) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700163 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_dex_index_), new_idx, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800164 }
165
166 ObjectArray<String>* GetDexCacheStrings() const;
167 void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
168 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
169
170 static MemberOffset DexCacheStringsOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700171 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_strings_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800172 }
173
174 static MemberOffset DexCacheResolvedMethodsOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700175 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800176 }
177
178 static MemberOffset DexCacheResolvedTypesOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700179 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800180 }
181
182 static MemberOffset DexCacheInitializedStaticStorageOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700183 return OFFSET_OF_OBJECT_MEMBER(ArtMethod,
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800184 dex_cache_initialized_static_storage_);
185 }
186
Brian Carlstromea46f952013-07-30 01:26:50 -0700187 ObjectArray<ArtMethod>* GetDexCacheResolvedMethods() const;
188 void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800189 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
190
191 ObjectArray<Class>* GetDexCacheResolvedTypes() const;
192 void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
193 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
194
195 ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
196 void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
197 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
198
199 // Find the method that this method overrides
Brian Carlstromea46f952013-07-30 01:26:50 -0700200 ArtMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800201
Jeff Hao6474d192013-03-26 14:08:09 -0700202 void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800203 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
204
Jeff Hao16743632013-05-08 10:59:04 -0700205 EntryPointFromInterpreter* GetEntryPointFromInterpreter() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700206 return GetFieldPtr<EntryPointFromInterpreter*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_), false);
Jeff Hao16743632013-05-08 10:59:04 -0700207 }
208
209 void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700210 SetFieldPtr<EntryPointFromInterpreter*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_), entry_point_from_interpreter, false);
Jeff Hao16743632013-05-08 10:59:04 -0700211 }
212
Jeff Haoaa4a7932013-05-13 11:28:27 -0700213 const void* GetEntryPointFromCompiledCode() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700214 return GetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800215 }
216
Jeff Haoaa4a7932013-05-13 11:28:27 -0700217 void SetEntryPointFromCompiledCode(const void* entry_point_from_compiled_code) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700218 SetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_), entry_point_from_compiled_code, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800219 }
220
221 uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
222
223 bool IsWithinCode(uintptr_t pc) const
224 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Jeff Haoaa4a7932013-05-13 11:28:27 -0700225 uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromCompiledCode());
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800226 if (code == 0) {
227 return pc == 0;
228 }
229 /*
230 * During a stack walk, a return PC may point to the end of the code + 1
231 * (in the case that the last instruction is a call that isn't expected to
232 * return. Thus, we check <= code + GetCodeSize().
233 */
234 return (code <= pc && pc <= code + GetCodeSize());
235 }
236
237 void AssertPcIsWithinCode(uintptr_t pc) const
238 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
239
240 uint32_t GetOatCodeOffset() const;
241
242 void SetOatCodeOffset(uint32_t code_offset);
243
Jeff Haoaa4a7932013-05-13 11:28:27 -0700244 static MemberOffset GetEntryPointFromCompiledCodeOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700245 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800246 }
247
Ian Rogers1809a722013-08-09 22:05:32 -0700248 // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
249 const uint8_t* GetMappingTable() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700250 return GetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, mapping_table_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800251 }
252
Ian Rogers1809a722013-08-09 22:05:32 -0700253 void SetMappingTable(const uint8_t* mapping_table) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700254 SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, mapping_table_),
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800255 mapping_table, false);
256 }
257
258 uint32_t GetOatMappingTableOffset() const;
259
260 void SetOatMappingTableOffset(uint32_t mapping_table_offset);
261
Ian Rogers1809a722013-08-09 22:05:32 -0700262 // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
263 const uint8_t* GetVmapTable() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700264 return GetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, vmap_table_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800265 }
266
Ian Rogers1809a722013-08-09 22:05:32 -0700267 void SetVmapTable(const uint8_t* vmap_table) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700268 SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, vmap_table_), vmap_table, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800269 }
270
271 uint32_t GetOatVmapTableOffset() const;
272
273 void SetOatVmapTableOffset(uint32_t vmap_table_offset);
274
275 const uint8_t* GetNativeGcMap() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700276 return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800277 }
278 void SetNativeGcMap(const uint8_t* data) {
Brian Carlstromea46f952013-07-30 01:26:50 -0700279 SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), data, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800280 }
281
282 // When building the oat need a convenient place to stuff the offset of the native GC map.
283 void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
284 uint32_t GetOatNativeGcMapOffset() const;
285
286 size_t GetFrameSizeInBytes() const {
287 DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
Brian Carlstromea46f952013-07-30 01:26:50 -0700288 size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, frame_size_in_bytes_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800289 DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
290 return result;
291 }
292
293 void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
294 DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
Brian Carlstromea46f952013-07-30 01:26:50 -0700295 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, frame_size_in_bytes_),
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800296 new_frame_size_in_bytes, false);
297 }
298
299 size_t GetReturnPcOffsetInBytes() const {
300 return GetFrameSizeInBytes() - kPointerSize;
301 }
302
Ian Rogers62d6c772013-02-27 08:32:07 -0800303 size_t GetSirtOffsetInBytes() const {
304 CHECK(IsNative());
305 return kPointerSize;
306 }
307
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800308 bool IsRegistered() const;
309
310 void RegisterNative(Thread* self, const void* native_method)
311 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
312
313 void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
314
315 static MemberOffset NativeMethodOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700316 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, native_method_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800317 }
318
319 const void* GetNativeMethod() const {
320 return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
321 }
322
323 void SetNativeMethod(const void*);
324
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800325 static MemberOffset GetMethodIndexOffset() {
Brian Carlstromea46f952013-07-30 01:26:50 -0700326 return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800327 }
328
329 uint32_t GetCoreSpillMask() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700330 return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, core_spill_mask_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800331 }
332
333 void SetCoreSpillMask(uint32_t core_spill_mask) {
334 // Computed during compilation
Brian Carlstromea46f952013-07-30 01:26:50 -0700335 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, core_spill_mask_), core_spill_mask, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800336 }
337
338 uint32_t GetFpSpillMask() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700339 return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, fp_spill_mask_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800340 }
341
342 void SetFpSpillMask(uint32_t fp_spill_mask) {
343 // Computed during compilation
Brian Carlstromea46f952013-07-30 01:26:50 -0700344 SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, fp_spill_mask_), fp_spill_mask, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800345 }
346
347 // Is this a CalleSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
348 // conventions for a method of managed code. Returns false for Proxy methods.
349 bool IsRuntimeMethod() const;
350
351 // Is this a hand crafted method used for something like describing callee saves?
352 bool IsCalleeSaveMethod() const;
353
354 bool IsResolutionMethod() const;
355
356 uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
357
358 // Converts a native PC to a dex PC.
359 uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
360
361 // Converts a dex PC to a native PC.
362 uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
363
Ian Rogersc449aa82013-07-29 14:35:46 -0700364 // Find the catch block for the given exception type and dex_pc. When a catch block is found,
365 // indicates whether the found catch block is responsible for clearing the exception or whether
366 // a move-exception instruction is present.
367 uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc, bool* has_no_move_exception) const
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800368 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
369
Brian Carlstromea46f952013-07-30 01:26:50 -0700370 static void SetClass(Class* java_lang_reflect_ArtMethod);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800371
Brian Carlstromea46f952013-07-30 01:26:50 -0700372 static Class* GetJavaLangReflectArtMethod() {
373 return java_lang_reflect_ArtMethod_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800374 }
375
Brian Carlstromea46f952013-07-30 01:26:50 -0700376 static void ResetClass();
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800377
378 protected:
379 // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
380 // The class we are a part of
381 Class* declaring_class_;
382
383 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
384 ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;
385
386 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
Brian Carlstromea46f952013-07-30 01:26:50 -0700387 ObjectArray<ArtMethod>* dex_cache_resolved_methods_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800388
389 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
390 ObjectArray<Class>* dex_cache_resolved_types_;
391
392 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
393 ObjectArray<String>* dex_cache_strings_;
394
395 // Access flags; low 16 bits are defined by spec.
396 uint32_t access_flags_;
397
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800398 // Offset to the CodeItem.
399 uint32_t code_item_offset_;
400
401 // Architecture-dependent register spill mask
402 uint32_t core_spill_mask_;
403
Jeff Haoaa4a7932013-05-13 11:28:27 -0700404 // Compiled code associated with this method for callers from managed code.
405 // May be compiled managed code or a bridge for invoking a native method.
406 // TODO: Break apart this into portable and quick.
407 const void* entry_point_from_compiled_code_;
408
Jeff Hao16743632013-05-08 10:59:04 -0700409 // Called by the interpreter to execute this method.
410 EntryPointFromInterpreter* entry_point_from_interpreter_;
411
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800412 // Architecture-dependent register spill mask
413 uint32_t fp_spill_mask_;
414
415 // Total size in bytes of the frame
416 size_t frame_size_in_bytes_;
417
Jeff Hao16743632013-05-08 10:59:04 -0700418 // Garbage collection map of native PC offsets (quick) or dex PCs (portable) to reference bitmaps.
419 const uint8_t* gc_map_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800420
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800421 // Mapping from native pc to dex pc
422 const uint32_t* mapping_table_;
423
424 // Index into method_ids of the dex file associated with this method
425 uint32_t method_dex_index_;
426
427 // For concrete virtual methods, this is the offset of the method in Class::vtable_.
428 //
429 // For abstract methods in an interface class, this is the offset of the method in
430 // "iftable_->Get(n)->GetMethodArray()".
431 //
432 // For static and direct methods this is the index in the direct methods table.
433 uint32_t method_index_;
434
435 // The target native method registered with this method
436 const void* native_method_;
437
438 // When a register is promoted into a register, the spill mask holds which registers hold dex
439 // registers. The first promoted register's corresponding dex register is vmap_table_[1], the Nth
440 // is vmap_table_[N]. vmap_table_[0] holds the length of the table.
441 const uint16_t* vmap_table_;
442
Brian Carlstromea46f952013-07-30 01:26:50 -0700443 static Class* java_lang_reflect_ArtMethod_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800444
Mathieu Chartier02e25112013-08-14 16:14:24 -0700445 private:
Brian Carlstromea46f952013-07-30 01:26:50 -0700446 friend struct art::ArtMethodOffsets; // for verifying offset information
447 DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800448};
449
// C++ mirror of the Class object for ArtMethod; declares no state beyond Class.
class MANAGED ArtMethodClass : public Class {
 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethodClass);
};
454
455} // namespace mirror
456} // namespace art
457
Brian Carlstromea46f952013-07-30 01:26:50 -0700458#endif // ART_RUNTIME_MIRROR_ART_METHOD_H_