/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#ifndef ART_SRC_MIRROR_METHOD_H_
#define ART_SRC_MIRROR_METHOD_H_

#include "class.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"

namespace art {

// Forward declarations of offset-checking helpers and runtime types; the
// definitions live elsewhere in the project.
struct AbstractMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
struct MethodOffsets;
class StringPiece;
class ShadowFrame;

namespace mirror {

class StaticStorageBase;

// Signature of the function invoked to execute a method under the interpreter
// (stored per-method in AbstractMethod::entry_point_from_interpreter_).
typedef JValue (EntryPointFromInterpreter)(Thread* self, ShadowFrame* shadow_frame);

Ian Rogers2dd0e2c2013-01-24 12:42:14 -080042// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
43class MANAGED AbstractMethod : public Object {
44 public:
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080045 Class* GetDeclaringClass() const;
46
47 void SetDeclaringClass(Class *new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
48
49 static MemberOffset DeclaringClassOffset() {
50 return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, declaring_class_));
51 }
52
Jeff Haoaa4a7932013-05-13 11:28:27 -070053 static MemberOffset EntryPointFromCompiledCodeOffset() {
54 return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, entry_point_from_compiled_code_));
Jeff Hao5d917302013-02-27 17:57:33 -080055 }
56
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080057 uint32_t GetAccessFlags() const;
58
59 void SetAccessFlags(uint32_t new_access_flags) {
60 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, access_flags_), new_access_flags, false);
61 }
62
63 // Approximate what kind of method call would be used for this method.
64 InvokeType GetInvokeType() const;
65
66 // Returns true if the method is declared public.
67 bool IsPublic() const {
68 return (GetAccessFlags() & kAccPublic) != 0;
69 }
70
71 // Returns true if the method is declared private.
72 bool IsPrivate() const {
73 return (GetAccessFlags() & kAccPrivate) != 0;
74 }
75
76 // Returns true if the method is declared static.
77 bool IsStatic() const {
78 return (GetAccessFlags() & kAccStatic) != 0;
79 }
80
81 // Returns true if the method is a constructor.
82 bool IsConstructor() const {
83 return (GetAccessFlags() & kAccConstructor) != 0;
84 }
85
86 // Returns true if the method is static, private, or a constructor.
87 bool IsDirect() const {
88 return IsDirect(GetAccessFlags());
89 }
90
91 static bool IsDirect(uint32_t access_flags) {
92 return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
93 }
94
95 // Returns true if the method is declared synchronized.
96 bool IsSynchronized() const {
97 uint32_t synchonized = kAccSynchronized | kAccDeclaredSynchronized;
98 return (GetAccessFlags() & synchonized) != 0;
99 }
100
101 bool IsFinal() const {
102 return (GetAccessFlags() & kAccFinal) != 0;
103 }
104
105 bool IsMiranda() const {
106 return (GetAccessFlags() & kAccMiranda) != 0;
107 }
108
109 bool IsNative() const {
110 return (GetAccessFlags() & kAccNative) != 0;
111 }
112
113 bool IsAbstract() const {
114 return (GetAccessFlags() & kAccAbstract) != 0;
115 }
116
117 bool IsSynthetic() const {
118 return (GetAccessFlags() & kAccSynthetic) != 0;
119 }
120
121 bool IsProxyMethod() const;
122
123 bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
124
125 uint16_t GetMethodIndex() const;
126
127 size_t GetVtableIndex() const {
128 return GetMethodIndex();
129 }
130
131 void SetMethodIndex(uint16_t new_method_index) {
132 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_), new_method_index, false);
133 }
134
135 static MemberOffset MethodIndexOffset() {
136 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
137 }
138
139 uint32_t GetCodeItemOffset() const {
140 return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), false);
141 }
142
143 void SetCodeItemOffset(uint32_t new_code_off) {
144 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), new_code_off, false);
145 }
146
147 // Number of 32bit registers that would be required to hold all the arguments
148 static size_t NumArgRegisters(const StringPiece& shorty);
149
150 uint32_t GetDexMethodIndex() const;
151
152 void SetDexMethodIndex(uint32_t new_idx) {
153 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_dex_index_), new_idx, false);
154 }
155
156 ObjectArray<String>* GetDexCacheStrings() const;
157 void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
158 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
159
160 static MemberOffset DexCacheStringsOffset() {
161 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_strings_);
162 }
163
164 static MemberOffset DexCacheResolvedMethodsOffset() {
165 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_methods_);
166 }
167
168 static MemberOffset DexCacheResolvedTypesOffset() {
169 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_types_);
170 }
171
172 static MemberOffset DexCacheInitializedStaticStorageOffset() {
173 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod,
174 dex_cache_initialized_static_storage_);
175 }
176
177 ObjectArray<AbstractMethod>* GetDexCacheResolvedMethods() const;
178 void SetDexCacheResolvedMethods(ObjectArray<AbstractMethod>* new_dex_cache_methods)
179 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
180
181 ObjectArray<Class>* GetDexCacheResolvedTypes() const;
182 void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
183 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
184
185 ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
186 void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
187 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
188
189 // Find the method that this method overrides
190 AbstractMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
191
Jeff Hao6474d192013-03-26 14:08:09 -0700192 void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800193 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
194
Jeff Hao16743632013-05-08 10:59:04 -0700195 EntryPointFromInterpreter* GetEntryPointFromInterpreter() const {
196 return GetFieldPtr<EntryPointFromInterpreter*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_interpreter_), false);
197 }
198
199 void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter) {
200 SetFieldPtr<EntryPointFromInterpreter*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_interpreter_), entry_point_from_interpreter, false);
201 }
202
Jeff Haoaa4a7932013-05-13 11:28:27 -0700203 const void* GetEntryPointFromCompiledCode() const {
204 return GetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800205 }
206
Jeff Haoaa4a7932013-05-13 11:28:27 -0700207 void SetEntryPointFromCompiledCode(const void* entry_point_from_compiled_code) {
208 SetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_), entry_point_from_compiled_code, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800209 }
210
211 uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
212
213 bool IsWithinCode(uintptr_t pc) const
214 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Jeff Haoaa4a7932013-05-13 11:28:27 -0700215 uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromCompiledCode());
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800216 if (code == 0) {
217 return pc == 0;
218 }
219 /*
220 * During a stack walk, a return PC may point to the end of the code + 1
221 * (in the case that the last instruction is a call that isn't expected to
222 * return. Thus, we check <= code + GetCodeSize().
223 */
224 return (code <= pc && pc <= code + GetCodeSize());
225 }
226
227 void AssertPcIsWithinCode(uintptr_t pc) const
228 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
229
230 uint32_t GetOatCodeOffset() const;
231
232 void SetOatCodeOffset(uint32_t code_offset);
233
Jeff Haoaa4a7932013-05-13 11:28:27 -0700234 static MemberOffset GetEntryPointFromCompiledCodeOffset() {
235 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800236 }
237
238 const uint32_t* GetMappingTable() const {
239 const uint32_t* map = GetMappingTableRaw();
240 if (map == NULL) {
241 return map;
242 }
243 return map + 1;
244 }
245
246 uint32_t GetPcToDexMappingTableLength() const {
247 const uint32_t* map = GetMappingTableRaw();
248 if (map == NULL) {
249 return 0;
250 }
251 return map[2];
252 }
253
254 const uint32_t* GetPcToDexMappingTable() const {
255 const uint32_t* map = GetMappingTableRaw();
256 if (map == NULL) {
257 return map;
258 }
259 return map + 3;
260 }
261
262
263 uint32_t GetDexToPcMappingTableLength() const {
264 const uint32_t* map = GetMappingTableRaw();
265 if (map == NULL) {
266 return 0;
267 }
268 return map[1] - map[2];
269 }
270
271 const uint32_t* GetDexToPcMappingTable() const {
272 const uint32_t* map = GetMappingTableRaw();
273 if (map == NULL) {
274 return map;
275 }
276 return map + 3 + map[2];
277 }
278
279
280 const uint32_t* GetMappingTableRaw() const {
281 return GetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_), false);
282 }
283
284 void SetMappingTable(const uint32_t* mapping_table) {
285 SetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_),
286 mapping_table, false);
287 }
288
289 uint32_t GetOatMappingTableOffset() const;
290
291 void SetOatMappingTableOffset(uint32_t mapping_table_offset);
292
293 // Callers should wrap the uint16_t* in a VmapTable instance for convenient access.
294 const uint16_t* GetVmapTableRaw() const {
295 return GetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), false);
296 }
297
298 void SetVmapTable(const uint16_t* vmap_table) {
299 SetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), vmap_table, false);
300 }
301
302 uint32_t GetOatVmapTableOffset() const;
303
304 void SetOatVmapTableOffset(uint32_t vmap_table_offset);
305
306 const uint8_t* GetNativeGcMap() const {
Jeff Hao16743632013-05-08 10:59:04 -0700307 return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_), false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800308 }
309 void SetNativeGcMap(const uint8_t* data) {
Jeff Hao16743632013-05-08 10:59:04 -0700310 SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_), data, false);
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800311 }
312
313 // When building the oat need a convenient place to stuff the offset of the native GC map.
314 void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
315 uint32_t GetOatNativeGcMapOffset() const;
316
317 size_t GetFrameSizeInBytes() const {
318 DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
319 size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_), false);
320 DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
321 return result;
322 }
323
324 void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
325 DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
326 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_),
327 new_frame_size_in_bytes, false);
328 }
329
330 size_t GetReturnPcOffsetInBytes() const {
331 return GetFrameSizeInBytes() - kPointerSize;
332 }
333
Ian Rogers62d6c772013-02-27 08:32:07 -0800334 size_t GetSirtOffsetInBytes() const {
335 CHECK(IsNative());
336 return kPointerSize;
337 }
338
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800339 bool IsRegistered() const;
340
341 void RegisterNative(Thread* self, const void* native_method)
342 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
343
344 void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
345
346 static MemberOffset NativeMethodOffset() {
347 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_method_);
348 }
349
350 const void* GetNativeMethod() const {
351 return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
352 }
353
354 void SetNativeMethod(const void*);
355
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800356 static MemberOffset GetMethodIndexOffset() {
357 return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
358 }
359
360 uint32_t GetCoreSpillMask() const {
361 return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), false);
362 }
363
364 void SetCoreSpillMask(uint32_t core_spill_mask) {
365 // Computed during compilation
366 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), core_spill_mask, false);
367 }
368
369 uint32_t GetFpSpillMask() const {
370 return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), false);
371 }
372
373 void SetFpSpillMask(uint32_t fp_spill_mask) {
374 // Computed during compilation
375 SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), fp_spill_mask, false);
376 }
377
378 // Is this a CalleSaveMethod or ResolutionMethod and therefore doesn't adhere to normal
379 // conventions for a method of managed code. Returns false for Proxy methods.
380 bool IsRuntimeMethod() const;
381
382 // Is this a hand crafted method used for something like describing callee saves?
383 bool IsCalleeSaveMethod() const;
384
385 bool IsResolutionMethod() const;
386
387 uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
388
389 // Converts a native PC to a dex PC.
390 uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
391
392 // Converts a dex PC to a native PC.
393 uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
394
395 // Converts a dex PC to the first corresponding safepoint PC.
396 uintptr_t ToFirstNativeSafepointPc(const uint32_t dex_pc)
397 const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
398
399 // Find the catch block for the given exception type and dex_pc
400 uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc) const
401 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
402
403 static void SetClasses(Class* java_lang_reflect_Constructor, Class* java_lang_reflect_Method);
404
405 static Class* GetConstructorClass() {
406 return java_lang_reflect_Constructor_;
407 }
408
409 static Class* GetMethodClass() {
410 return java_lang_reflect_Method_;
411 }
412
413 static void ResetClasses();
414
415 protected:
416 // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
417 // The class we are a part of
418 Class* declaring_class_;
419
420 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
421 ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;
422
423 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
424 ObjectArray<AbstractMethod>* dex_cache_resolved_methods_;
425
426 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
427 ObjectArray<Class>* dex_cache_resolved_types_;
428
429 // short cuts to declaring_class_->dex_cache_ member for fast compiled code access
430 ObjectArray<String>* dex_cache_strings_;
431
432 // Access flags; low 16 bits are defined by spec.
433 uint32_t access_flags_;
434
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800435 // Offset to the CodeItem.
436 uint32_t code_item_offset_;
437
438 // Architecture-dependent register spill mask
439 uint32_t core_spill_mask_;
440
Jeff Haoaa4a7932013-05-13 11:28:27 -0700441 // Compiled code associated with this method for callers from managed code.
442 // May be compiled managed code or a bridge for invoking a native method.
443 // TODO: Break apart this into portable and quick.
444 const void* entry_point_from_compiled_code_;
445
Jeff Hao16743632013-05-08 10:59:04 -0700446 // Called by the interpreter to execute this method.
447 EntryPointFromInterpreter* entry_point_from_interpreter_;
448
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800449 // Architecture-dependent register spill mask
450 uint32_t fp_spill_mask_;
451
452 // Total size in bytes of the frame
453 size_t frame_size_in_bytes_;
454
Jeff Hao16743632013-05-08 10:59:04 -0700455 // Garbage collection map of native PC offsets (quick) or dex PCs (portable) to reference bitmaps.
456 const uint8_t* gc_map_;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800457
Ian Rogers2dd0e2c2013-01-24 12:42:14 -0800458 // Mapping from native pc to dex pc
459 const uint32_t* mapping_table_;
460
461 // Index into method_ids of the dex file associated with this method
462 uint32_t method_dex_index_;
463
464 // For concrete virtual methods, this is the offset of the method in Class::vtable_.
465 //
466 // For abstract methods in an interface class, this is the offset of the method in
467 // "iftable_->Get(n)->GetMethodArray()".
468 //
469 // For static and direct methods this is the index in the direct methods table.
470 uint32_t method_index_;
471
472 // The target native method registered with this method
473 const void* native_method_;
474
475 // When a register is promoted into a register, the spill mask holds which registers hold dex
476 // registers. The first promoted register's corresponding dex register is vmap_table_[1], the Nth
477 // is vmap_table_[N]. vmap_table_[0] holds the length of the table.
478 const uint16_t* vmap_table_;
479
480 static Class* java_lang_reflect_Constructor_;
481 static Class* java_lang_reflect_Method_;
482
483 friend struct art::AbstractMethodOffsets; // for verifying offset information
484 friend struct art::ConstructorMethodOffsets; // for verifying offset information
485 friend struct art::MethodOffsets; // for verifying offset information
486 DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
487};
488
489class MANAGED Method : public AbstractMethod {
490
491};
492
493class MANAGED Constructor : public AbstractMethod {
494
495};
496
// C++ mirror of the java.lang.Class for java.lang.reflect.Method /
// java.lang.reflect.Constructor instances.
class MANAGED AbstractMethodClass : public Class {
 private:
  // NOTE(review): presumably mirrors a static ORDER_BY_SIGNATURE comparator
  // field on the managed class — confirm against the Java-side definition.
  Object* ORDER_BY_SIGNATURE_;
  friend struct art::MethodClassOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_SRC_MIRROR_METHOD_H_