/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MIRROR_METHOD_H_
#define ART_SRC_MIRROR_METHOD_H_

#include "class.h"
#include "dex_file.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"

namespace art {

struct AbstractMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
class MethodHelper;
struct MethodOffsets;
class StringPiece;
class ShadowFrame;

namespace mirror {

class StaticStorageBase;

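// Signature of the entry point the interpreter uses to execute a method (see
// entry_point_from_interpreter_ below). A rough call sketch, assuming a non-null pointer
// and that self, mh, code_item and shadow_frame are already in scope:
//
//   EntryPointFromInterpreter* entry = method->GetEntryPointFromInterpreter();
//   JValue result;
//   (*entry)(self, mh, code_item, shadow_frame, &result);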
typedef void (EntryPointFromInterpreter)(Thread* self, MethodHelper& mh,
                                          const DexFile::CodeItem* code_item,
                                          ShadowFrame* shadow_frame, JValue* result);

// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
class MANAGED AbstractMethod : public Object {
 public:
  Class* GetDeclaringClass() const;

  void SetDeclaringClass(Class* new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, declaring_class_));
  }

  static MemberOffset EntryPointFromCompiledCodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, entry_point_from_compiled_code_));
  }

  uint32_t GetAccessFlags() const;

  void SetAccessFlags(uint32_t new_access_flags) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, access_flags_), new_access_flags, false);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() const;

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() const {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsAbstract() const {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() const;

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() const;

  size_t GetVtableIndex() const {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_), new_method_index, false);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), false);
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), new_code_off, false);
  }

  // Number of 32-bit registers required to hold all of the arguments.
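  // (For example, assuming the usual shorty encoding, where 'J' and 'D' each occupy two
  // registers, every other type one, and the leading return-type character is skipped,
  // a shorty of "VJI" would need 3 argument registers.)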
  static size_t NumArgRegisters(const StringPiece& shorty);

  uint32_t GetDexMethodIndex() const;

  void SetDexMethodIndex(uint32_t new_idx) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_dex_index_), new_idx, false);
  }

  ObjectArray<String>* GetDexCacheStrings() const;
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_types_);
  }

  static MemberOffset DexCacheInitializedStaticStorageOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_initialized_static_storage_);
  }

  ObjectArray<AbstractMethod>* GetDexCacheResolvedMethods() const;
  void SetDexCacheResolvedMethods(ObjectArray<AbstractMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() const;
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
  void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides
  AbstractMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

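  // Invoke this method from the runtime. Reading the signature (an inference, not a
  // documented contract): 'args' holds the flat 32-bit argument words, with wide values
  // taking two slots and the receiver presumably first for non-static methods;
  // 'args_size' is the size of that array in bytes; 'result_type' is the shorty
  // character of the return type.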
  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

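  // Two entry points hang off each method: managed callers jump straight to
  // entry_point_from_compiled_code_, while interpreted execution goes through
  // entry_point_from_interpreter_ (see the EntryPointFromInterpreter typedef above and
  // the field comments below).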
  EntryPointFromInterpreter* GetEntryPointFromInterpreter() const {
    return GetFieldPtr<EntryPointFromInterpreter*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_interpreter_), false);
  }

  void SetEntryPointFromInterpreter(EntryPointFromInterpreter* entry_point_from_interpreter) {
    SetFieldPtr<EntryPointFromInterpreter*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_interpreter_),
        entry_point_from_interpreter, false);
  }

  const void* GetEntryPointFromCompiledCode() const {
    return GetFieldPtr<const void*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_), false);
  }

  void SetEntryPointFromCompiledCode(const void* entry_point_from_compiled_code) {
    SetFieldPtr<const void*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_),
        entry_point_from_compiled_code, false);
  }

  uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetEntryPointFromCompiledCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point to the end of the code + 1 (in the case
     * that the last instruction is a call that isn't expected to return).
     * Thus, we check pc <= code + GetCodeSize().
     */
    return (code <= pc && pc <= code + GetCodeSize());
  }

  void AssertPcIsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint32_t GetOatCodeOffset() const;

  void SetOatCodeOffset(uint32_t code_offset);

  static MemberOffset GetEntryPointFromCompiledCodeOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, entry_point_from_compiled_code_);
  }

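  // Apparent layout of the raw mapping table (inferred from the accessors below rather
  // than documented here): map[0..2] form a header, with map[1] the combined length of
  // both sub-tables and map[2] the length of the PC-to-dex portion; the PC-to-dex data
  // starts at map + 3 and the dex-to-PC data follows at map + 3 + map[2].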
  const uint32_t* GetMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 1;
  }

  uint32_t GetPcToDexMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[2];
  }

  const uint32_t* GetPcToDexMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3;
  }

  uint32_t GetDexToPcMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[1] - map[2];
  }

  const uint32_t* GetDexToPcMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3 + map[2];
  }

  const uint32_t* GetMappingTableRaw() const {
    return GetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_), false);
  }

  void SetMappingTable(const uint32_t* mapping_table) {
    SetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_),
                                 mapping_table, false);
  }

  uint32_t GetOatMappingTableOffset() const;

  void SetOatMappingTableOffset(uint32_t mapping_table_offset);

  // Callers should wrap the uint16_t* in a VmapTable instance for convenient access.
  const uint16_t* GetVmapTableRaw() const {
    return GetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), false);
  }

  void SetVmapTable(const uint16_t* vmap_table) {
    SetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), vmap_table, false);
  }

  uint32_t GetOatVmapTableOffset() const;

  void SetOatVmapTableOffset(uint32_t vmap_table_offset);

  const uint8_t* GetNativeGcMap() const {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_), false);
  }
  void SetNativeGcMap(const uint8_t* data) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, gc_map_), data, false);
  }

  // When building the oat we need a convenient place to stuff the offset of the native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
  uint32_t GetOatNativeGcMapOffset() const;

  size_t GetFrameSizeInBytes() const {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_), false);
    DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    return result;
  }

  void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_),
               new_frame_size_in_bytes, false);
  }

  size_t GetReturnPcOffsetInBytes() const {
    return GetFrameSizeInBytes() - kPointerSize;
  }

  size_t GetSirtOffsetInBytes() const {
    CHECK(IsNative());
    return kPointerSize;
  }
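  // Taken together, the two accessors above imply (though do not document) that the
  // return PC sits in the top pointer-sized slot of the frame and that, for native
  // methods, the SIRT begins one pointer into the frame.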

  bool IsRegistered() const;

  void RegisterNative(Thread* self, const void* native_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_method_);
  }

  const void* GetNativeMethod() const {
    return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
  }

  void SetNativeMethod(const void*);

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCoreSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), false);
  }

  void SetCoreSpillMask(uint32_t core_spill_mask) {
    // Computed during compilation
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), core_spill_mask, false);
  }

  uint32_t GetFpSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), false);
  }

  void SetFpSpillMask(uint32_t fp_spill_mask) {
    // Computed during compilation
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), fp_spill_mask, false);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore one that does not adhere
  // to the normal conventions of managed code? Returns false for Proxy methods.
  bool IsRuntimeMethod() const;

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() const;

  bool IsResolutionMethod() const;

  uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to the first corresponding safepoint PC.
  uintptr_t ToFirstNativeSafepointPc(const uint32_t dex_pc)
      const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc
  uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClasses(Class* java_lang_reflect_Constructor, Class* java_lang_reflect_Method);

  static Class* GetConstructorClass() {
    return java_lang_reflect_Constructor_;
  }

  static Class* GetMethodClass() {
    return java_lang_reflect_Method_;
  }

  static void ResetClasses();

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of
  Class* declaring_class_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access
  ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access
  ObjectArray<AbstractMethod>* dex_cache_resolved_methods_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access
  ObjectArray<Class>* dex_cache_resolved_types_;

  // Short cut to declaring_class_->dex_cache_ member for fast compiled code access
  ObjectArray<String>* dex_cache_strings_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  // Offset to the CodeItem.
  uint32_t code_item_offset_;

  // Architecture-dependent register spill mask
  uint32_t core_spill_mask_;

  // Compiled code associated with this method for callers from managed code.
  // May be compiled managed code or a bridge for invoking a native method.
  // TODO: Break apart this into portable and quick.
  const void* entry_point_from_compiled_code_;

  // Called by the interpreter to execute this method.
  EntryPointFromInterpreter* entry_point_from_interpreter_;

  // Architecture-dependent register spill mask
  uint32_t fp_spill_mask_;

  // Total size in bytes of the frame
  size_t frame_size_in_bytes_;

  // Garbage collection map of native PC offsets (quick) or dex PCs (portable) to
  // reference bitmaps.
  const uint8_t* gc_map_;

  // Mapping from native pc to dex pc
  const uint32_t* mapping_table_;

  // Index into method_ids of the dex file associated with this method
  uint32_t method_dex_index_;

  // For concrete virtual methods, this is the offset of the method in Class::vtable_.
  //
  // For abstract methods in an interface class, this is the offset of the method in
  // "iftable_->Get(n)->GetMethodArray()".
  //
  // For static and direct methods this is the index in the direct methods table.
  uint32_t method_index_;

  // The target native method registered with this method
  const void* native_method_;

  // When a dex register is promoted into a physical register, the spill mask records which
  // physical registers hold dex registers. The first promoted register's corresponding dex
  // register is vmap_table_[1], the Nth is vmap_table_[N]. vmap_table_[0] holds the length
  // of the table.
  const uint16_t* vmap_table_;
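  // (Hypothetical example: a vmap table of {2, 5, 9} would describe two promoted
  // registers, mapping dex registers 5 and 9 onto the promoted physical registers.)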

  static Class* java_lang_reflect_Constructor_;
  static Class* java_lang_reflect_Method_;

  friend struct art::AbstractMethodOffsets;  // for verifying offset information
  friend struct art::ConstructorMethodOffsets;  // for verifying offset information
  friend struct art::MethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
};

class MANAGED Method : public AbstractMethod {
};

class MANAGED Constructor : public AbstractMethod {
};

class MANAGED AbstractMethodClass : public Class {
 private:
  Object* ORDER_BY_SIGNATURE_;
  friend struct art::MethodClassOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_SRC_MIRROR_METHOD_H_