/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MIRROR_METHOD_H_
#define ART_SRC_MIRROR_METHOD_H_

#include "class.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"

namespace art {

struct AbstractMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
struct MethodOffsets;
class StringPiece;

namespace mirror {

class StaticStorageBase;

// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor.
class MANAGED AbstractMethod : public Object {
 public:
  // A function that invokes a method with an array of its arguments.
  typedef void InvokeStub(const AbstractMethod* method,
                          Object* obj,
                          Thread* thread,
                          JValue* args,
                          JValue* result);
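
  // A minimal usage sketch (illustrative only; assumes the receiver and the
  // JValue argument array have already been marshalled by the caller):
  //   JValue result;
  //   InvokeStub* stub = method->GetInvokeStub();
  //   stub(method, receiver, Thread::Current(), args, &result);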

  Class* GetDeclaringClass() const;

  void SetDeclaringClass(Class* new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, declaring_class_));
  }

  static MemberOffset CodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, code_));
  }

  uint32_t GetAccessFlags() const;

  void SetAccessFlags(uint32_t new_access_flags) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, access_flags_), new_access_flags, false);
  }

  // Approximates what kind of method call would be used to invoke this method.
  InvokeType GetInvokeType() const;

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() const {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsAbstract() const {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() const;

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() const;

  size_t GetVtableIndex() const {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_), new_method_index, false);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), false);
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), new_code_off, false);
  }

  // Number of 32-bit registers that would be required to hold all of the arguments.
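  // For example (illustrative): a shorty of "VJI" (void return, long and int
  // arguments) would give 3, since 'J' (long) and 'D' (double) are wide and
  // take two registers each.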
  static size_t NumArgRegisters(const StringPiece& shorty);

  uint32_t GetDexMethodIndex() const;

  void SetDexMethodIndex(uint32_t new_idx) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_dex_index_), new_idx, false);
  }

  ObjectArray<String>* GetDexCacheStrings() const;
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_types_);
  }

  static MemberOffset DexCacheInitializedStaticStorageOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod,
                                   dex_cache_initialized_static_storage_);
  }

  ObjectArray<AbstractMethod>* GetDexCacheResolvedMethods() const;
  void SetDexCacheResolvedMethods(ObjectArray<AbstractMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() const;
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
  void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Finds the method that this method overrides.
  AbstractMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result,
              JValue* float_result)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const void* GetCode() const {
    return GetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_), false);
  }

  void SetCode(const void* code) {
    SetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_), code, false);
  }

  uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point to the end of the code + 1
     * (in the case that the last instruction is a call that isn't expected to
     * return). Thus, we check <= code + GetCodeSize().
     */
    return (code <= pc && pc <= code + GetCodeSize());
  }

  void AssertPcIsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint32_t GetOatCodeOffset() const;

  void SetOatCodeOffset(uint32_t code_offset);

  static MemberOffset GetCodeOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_);
  }

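  // Sketch of the mapping table layout, as inferred from the accessors below
  // (an assumption, not a normative description of the format):
  //   map[0]      header word, skipped by GetMappingTable()
  //   map[1]      total entry count across both sub-tables
  //   map[2]      PC-to-dex entry count
  //   map[3] ...  the PC-to-dex entries, followed by the dex-to-PC entries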
  const uint32_t* GetMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 1;
  }

  uint32_t GetPcToDexMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[2];
  }

  const uint32_t* GetPcToDexMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3;
  }

  uint32_t GetDexToPcMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[1] - map[2];
  }

  const uint32_t* GetDexToPcMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3 + map[2];
  }

  const uint32_t* GetMappingTableRaw() const {
    return GetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_), false);
  }

  void SetMappingTable(const uint32_t* mapping_table) {
    SetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_),
                                 mapping_table, false);
  }

  uint32_t GetOatMappingTableOffset() const;

  void SetOatMappingTableOffset(uint32_t mapping_table_offset);

  // Callers should wrap the uint16_t* in a VmapTable instance for convenient access.
  const uint16_t* GetVmapTableRaw() const {
    return GetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), false);
  }
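  // e.g. (illustrative): VmapTable vmap(method->GetVmapTableRaw());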

  void SetVmapTable(const uint16_t* vmap_table) {
    SetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), vmap_table, false);
  }

  uint32_t GetOatVmapTableOffset() const;

  void SetOatVmapTableOffset(uint32_t vmap_table_offset);

  const uint8_t* GetNativeGcMap() const {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_gc_map_), false);
  }
  void SetNativeGcMap(const uint8_t* data) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_gc_map_), data,
                                false);
  }

  // When building the oat file, we need a convenient place to stuff the offset of the native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
  uint32_t GetOatNativeGcMapOffset() const;

  size_t GetFrameSizeInBytes() const {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_), false);
    DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    return result;
  }

  void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_),
               new_frame_size_in_bytes, false);
  }

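  // The return PC is assumed to be saved in the last pointer-sized slot of the
  // frame, hence the frame size minus one pointer.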
  size_t GetReturnPcOffsetInBytes() const {
    return GetFrameSizeInBytes() - kPointerSize;
  }

  bool IsRegistered() const;

  void RegisterNative(Thread* self, const void* native_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_method_);
  }

  const void* GetNativeMethod() const {
    return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
  }

  void SetNativeMethod(const void*);

  // Native-to-managed invocation stub entry point.
  InvokeStub* GetInvokeStub() const {
    InvokeStub* result = GetFieldPtr<InvokeStub*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_), false);
    // TODO: DCHECK(result != NULL); should be ahead of time compiled
    return result;
  }

  void SetInvokeStub(InvokeStub* invoke_stub) {
    SetFieldPtr<InvokeStub*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_),
                             invoke_stub, false);
  }

  uint32_t GetInvokeStubSize() const {
    uintptr_t invoke_stub = reinterpret_cast<uintptr_t>(GetInvokeStub());
    if (invoke_stub == 0) {
      return 0;
    }
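    // On ARM, the low bit of a code pointer flags Thumb mode; it must be
    // cleared before indexing. The stub's size is assumed to be stored in the
    // 32-bit word immediately preceding its first instruction.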
    // TODO: make this Thumb2 specific
    invoke_stub &= ~0x1;
    return reinterpret_cast<const uint32_t*>(invoke_stub)[-1];
  }

  uint32_t GetOatInvokeStubOffset() const;
  void SetOatInvokeStubOffset(uint32_t invoke_stub_offset);

  static MemberOffset GetInvokeStubOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_);
  }

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCoreSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), false);
  }

  void SetCoreSpillMask(uint32_t core_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), core_spill_mask, false);
  }

  uint32_t GetFpSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), false);
  }

  void SetFpSpillMask(uint32_t fp_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), fp_spill_mask, false);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore one that doesn't adhere to the
  // normal calling conventions of managed code? Returns false for proxy methods.
  bool IsRuntimeMethod() const;

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() const;

  bool IsResolutionMethod() const;

  uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to the first corresponding safepoint PC.
  uintptr_t ToFirstNativeSafepointPc(const uint32_t dex_pc)
      const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Finds the catch block for the given exception type and dex_pc.
  uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClasses(Class* java_lang_reflect_Constructor, Class* java_lang_reflect_Method);

  static Class* GetConstructorClass() {
    return java_lang_reflect_Constructor_;
  }

  static Class* GetMethodClass() {
    return java_lang_reflect_Method_;
  }

  static void ResetClasses();

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  Class* declaring_class_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<AbstractMethod>* dex_cache_resolved_methods_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<Class>* dex_cache_resolved_types_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<String>* dex_cache_strings_;

  // Access flags; the low 16 bits are defined by the spec.
  uint32_t access_flags_;

  // Compiled code associated with this method for callers from managed code.
  // May be compiled managed code or a bridge for invoking a native method.
  const void* code_;

  // Offset to the CodeItem.
  uint32_t code_item_offset_;

  // Architecture-dependent core register spill mask.
  uint32_t core_spill_mask_;

  // Architecture-dependent floating point register spill mask.
  uint32_t fp_spill_mask_;

  // Total size in bytes of the frame.
  size_t frame_size_in_bytes_;

  // Garbage collection map of native PC offsets to reference bitmaps.
  const uint8_t* native_gc_map_;

  // Native invocation stub entry point for calling from native to managed code.
  InvokeStub* invoke_stub_;

  // Mapping from native PC to dex PC.
  const uint32_t* mapping_table_;

  // Index into the method_ids of the dex file associated with this method.
  uint32_t method_dex_index_;

  // For concrete virtual methods, this is the offset of the method in Class::vtable_.
  //
  // For abstract methods in an interface class, this is the offset of the method in
  // "iftable_->Get(n)->GetMethodArray()".
  //
  // For static and direct methods this is the index in the direct methods table.
  uint32_t method_index_;

  // The target native method registered with this method.
  const void* native_method_;

  // When a dex register is promoted into a physical register, the spill mask records which
  // physical registers hold dex registers. The first promoted register's corresponding dex
  // register is vmap_table_[1], the Nth is vmap_table_[N]; vmap_table_[0] holds the length of
  // the table.
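  // e.g. (illustrative): a vmap table of {2, 5, 9} would describe two promoted
  // registers whose dex registers are v5 and v9 respectively.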
  const uint16_t* vmap_table_;

  static Class* java_lang_reflect_Constructor_;
  static Class* java_lang_reflect_Method_;

  friend struct art::AbstractMethodOffsets;  // for verifying offset information
  friend struct art::ConstructorMethodOffsets;  // for verifying offset information
  friend struct art::MethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
};

class MANAGED Method : public AbstractMethod {
};

class MANAGED Constructor : public AbstractMethod {
};

class MANAGED AbstractMethodClass : public Class {
 private:
  Object* ORDER_BY_SIGNATURE_;
  friend struct art::MethodClassOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_SRC_MIRROR_METHOD_H_