/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_ART_METHOD_H_
#define ART_RUNTIME_MIRROR_ART_METHOD_H_

#include "class.h"
#include "dex_file.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"
#include "root_visitor.h"

namespace art {

struct ArtMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
class MethodHelper;
class StringPiece;
class ShadowFrame;

namespace mirror {

class StaticStorageBase;

typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
    const DexFile::CodeItem* code_item, ShadowFrame* shadow_frame, JValue* result);

// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor.
class MANAGED ArtMethod : public Object {
 public:
  Class* GetDeclaringClass() const;

  void SetDeclaringClass(Class* new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  static MemberOffset EntryPointFromCompiledCodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, entry_point_from_compiled_code_));
  }

  uint32_t GetAccessFlags() const;

  void SetAccessFlags(uint32_t new_access_flags) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, access_flags_), new_access_flags, false);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() const;

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }
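  // Illustrative note (not in the original header): with these flags, a private helper, a
  // static factory method and an <init> constructor are all "direct" and are dispatched with
  // invoke-direct/invoke-static, whereas a public non-final instance method is virtual and is
  // dispatched through the vtable.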

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() const {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsFastNative() const {
    return (GetAccessFlags() & kAccFastNative) != 0;
  }

  bool IsAbstract() const {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() const;

  bool IsPreverified() const {
    return (GetAccessFlags() & kAccPreverified) != 0;
  }

  void SetPreverified() {
    SetAccessFlags(GetAccessFlags() | kAccPreverified);
  }

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() const;

  size_t GetVtableIndex() const {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_), new_method_index, false);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, code_item_offset_), false);
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, code_item_offset_), new_code_off, false);
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);
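  // Illustrative example (not in the original header), assuming the usual dex shorty encoding
  // where 'J' (long) and 'D' (double) occupy two registers and every other type one, and where
  // the leading return-type character is not counted:
  //   NumArgRegisters("VJL") == 3  // long (2) + reference (1)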

  uint32_t GetDexMethodIndex() const;

  void SetDexMethodIndex(uint32_t new_idx) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_dex_index_), new_idx, false);
  }

  ObjectArray<String>* GetDexCacheStrings() const;
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_resolved_types_);
  }

  static MemberOffset DexCacheInitializedStaticStorageOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_cache_initialized_static_storage_);
  }

  ObjectArray<ArtMethod>* GetDexCacheResolvedMethods() const;
  void SetDexCacheResolvedMethods(ObjectArray<ArtMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() const;
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
  void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  EntryPointFromInterpreter* GetEntryPointFromInterpreter() const {
    return GetFieldPtr<EntryPointFromInterpreter*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_), false);
  }

  void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter) {
    SetFieldPtr<EntryPointFromInterpreter*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_interpreter_),
        entry_point_from_interpreter, false);
  }

  const void* GetEntryPointFromCompiledCode() const {
    return GetFieldPtr<const void*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_), false);
  }

  void SetEntryPointFromCompiledCode(const void* entry_point_from_compiled_code) {
    SetFieldPtr<const void*>(
        OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_),
        entry_point_from_compiled_code, false);
  }

  uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromCompiledCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point to the end of the code + 1
     * (in the case that the last instruction is a call that isn't expected to
     * return). Thus, we check <= code + GetCodeSize().
     */
    return (code <= pc && pc <= code + GetCodeSize());
  }
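  // Illustrative example (not in the original header): if the compiled code starts at
  // 0x40001000 and GetCodeSize() returns 0x80, a return PC of 0x40001080 (one past the end of
  // the code, as left behind by a final call that never returns) is still treated as being
  // within this method's code.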

  void AssertPcIsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint32_t GetOatCodeOffset() const;

  void SetOatCodeOffset(uint32_t code_offset);

  static MemberOffset GetEntryPointFromCompiledCodeOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, entry_point_from_compiled_code_);
  }

  // Callers should wrap the uint8_t* in a MappingTable instance for convenient access.
  const uint8_t* GetMappingTable() const {
    return GetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, mapping_table_), false);
  }

  void SetMappingTable(const uint8_t* mapping_table) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, mapping_table_),
                                mapping_table, false);
  }

  uint32_t GetOatMappingTableOffset() const;

  void SetOatMappingTableOffset(uint32_t mapping_table_offset);

  // Callers should wrap the uint8_t* in a VmapTable instance for convenient access.
  const uint8_t* GetVmapTable() const {
    return GetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, vmap_table_), false);
  }

  void SetVmapTable(const uint8_t* vmap_table) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, vmap_table_), vmap_table, false);
  }

  uint32_t GetOatVmapTableOffset() const;

  void SetOatVmapTableOffset(uint32_t vmap_table_offset);

  const uint8_t* GetNativeGcMap() const {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), false);
  }
  void SetNativeGcMap(const uint8_t* data) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(ArtMethod, gc_map_), data, false);
  }

  // When building the oat we need a convenient place to stuff the offset of the native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
  uint32_t GetOatNativeGcMapOffset() const;

  size_t GetFrameSizeInBytes() const {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, frame_size_in_bytes_), false);
    DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    return result;
  }

  void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, frame_size_in_bytes_),
               new_frame_size_in_bytes, false);
  }

  size_t GetReturnPcOffsetInBytes() const {
    return GetFrameSizeInBytes() - kPointerSize;
  }
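  // Illustrative example (not in the original header): with a 64-byte frame and 4-byte
  // pointers, GetReturnPcOffsetInBytes() yields 64 - 4 = 60, i.e. the return PC is spilled in
  // the last pointer-sized slot of the frame.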

  size_t GetSirtOffsetInBytes() const {
    CHECK(IsNative());
    return kPointerSize;
  }

  bool IsRegistered() const;

  void RegisterNative(Thread* self, const void* native_method, bool is_fast)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, native_method_);
  }

  const void* GetNativeMethod() const {
    return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
  }

  void SetNativeMethod(const void*);

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCoreSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, core_spill_mask_), false);
  }

  void SetCoreSpillMask(uint32_t core_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, core_spill_mask_), core_spill_mask, false);
  }

  uint32_t GetFpSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, fp_spill_mask_), false);
  }

  void SetFpSpillMask(uint32_t fp_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(ArtMethod, fp_spill_mask_), fp_spill_mask, false);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod and therefore doesn't adhere to the normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  bool IsRuntimeMethod() const;

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() const;

  bool IsResolutionMethod() const;

  bool IsImtConflictMethod() const;

  uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc, bool* has_no_move_exception) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClass(Class* java_lang_reflect_ArtMethod);

  static Class* GetJavaLangReflectArtMethod() {
    return java_lang_reflect_ArtMethod_;
  }

  static void ResetClass();

  static void VisitRoots(RootVisitor* visitor, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  Class* declaring_class_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<ArtMethod>* dex_cache_resolved_methods_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<Class>* dex_cache_resolved_types_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled code access.
  ObjectArray<String>* dex_cache_strings_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  // Offset to the CodeItem.
  uint32_t code_item_offset_;

  // Architecture-dependent register spill mask.
  uint32_t core_spill_mask_;

  // Compiled code associated with this method for callers from managed code.
  // May be compiled managed code or a bridge for invoking a native method.
  // TODO: Break apart this into portable and quick.
  const void* entry_point_from_compiled_code_;

  // Called by the interpreter to execute this method.
  EntryPointFromInterpreter* entry_point_from_interpreter_;

  // Architecture-dependent register spill mask.
  uint32_t fp_spill_mask_;

  // Total size in bytes of the frame.
  size_t frame_size_in_bytes_;

  // Garbage collection map of native PC offsets (quick) or dex PCs (portable) to reference
  // bitmaps.
  const uint8_t* gc_map_;

  // Mapping from native PC to dex PC.
  const uint32_t* mapping_table_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t method_dex_index_;

  // For concrete virtual methods, this is the offset of the method in Class::vtable_.
  //
  // For abstract methods in an interface class, this is the offset of the method in
  // "iftable_->Get(n)->GetMethodArray()".
  //
  // For static and direct methods this is the index in the direct methods table.
  uint32_t method_index_;

  // The target native method registered with this method.
  const void* native_method_;

  // When a dex register is promoted into a physical register, the spill mask holds which
  // registers hold dex registers. The first promoted register's corresponding dex register is
  // vmap_table_[1], the Nth is vmap_table_[N]. vmap_table_[0] holds the length of the table.
  const uint16_t* vmap_table_;
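  // Illustrative sketch (not from the original source): if dex registers v3 and v7 were
  // promoted, the table could hold
  //   const uint16_t example_vmap[] = { 2, 3, 7 };  // length, then the promoted dex registers
  // (assuming the stored length counts the promoted registers), so example_vmap[1] == 3 is the
  // dex register held by the first promoted physical register.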

  static Class* java_lang_reflect_ArtMethod_;

 private:
  friend struct art::ArtMethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethod);
};

class MANAGED ArtMethodClass : public Class {
 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(ArtMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_ART_METHOD_H_