/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MIRROR_METHOD_H_
#define ART_SRC_MIRROR_METHOD_H_

#include "class.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"

namespace art {

struct AbstractMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
struct MethodOffsets;
class StringPiece;

namespace mirror {

class StaticStorageBase;

// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
class MANAGED AbstractMethod : public Object {
 public:
  // A function that invokes a method with an array of its arguments.
  typedef void InvokeStub(const AbstractMethod* method,
                          Object* obj,
                          Thread* thread,
                          JValue* args,
                          JValue* result);
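
  // Usage sketch: a caller holding a resolved method, a receiver, and marshalled
  // arguments might drive the stub like this (illustrative only; |receiver| and
  // |args| are assumed names, not part of this header):
  //
  //   JValue result;
  //   InvokeStub* stub = method->GetInvokeStub();
  //   (*stub)(method, receiver, Thread::Current(), args, &result);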

  Class* GetDeclaringClass() const;

  void SetDeclaringClass(Class* new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() const;

  void SetAccessFlags(uint32_t new_access_flags) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, access_flags_), new_access_flags, false);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() const;

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsMiranda() const {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsAbstract() const {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsProxyMethod() const;

  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() const;

  size_t GetVtableIndex() const {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_), new_method_index, false);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), false);
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), new_code_off, false);
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);
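  // For example, a shorty of "VIJ" (void return, one int, one long) would need
  // three registers: one for the int plus two for the 64-bit long, since wide
  // types (J and D) occupy two 32-bit registers each.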

  uint32_t GetDexMethodIndex() const;

  void SetDexMethodIndex(uint32_t new_idx) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_dex_index_), new_idx, false);
  }

  ObjectArray<String>* GetDexCacheStrings() const;
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_types_);
  }

  static MemberOffset DexCacheInitializedStaticStorageOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod,
                                   dex_cache_initialized_static_storage_);
  }

  ObjectArray<AbstractMethod>* GetDexCacheResolvedMethods() const;
  void SetDexCacheResolvedMethods(ObjectArray<AbstractMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() const;
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
  void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  AbstractMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, Object* receiver, JValue* args, JValue* result)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
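
  // Usage sketch, assuming JValue exposes a plain union member |i| for ints (an
  // assumption about jvalue.h, not guaranteed by this header):
  //
  //   JValue arg, result;
  //   arg.i = 42;
  //   method->Invoke(Thread::Current(), receiver, &arg, &result);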

  const void* GetCode() const {
    return GetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_), false);
  }

  void SetCode(const void* code) {
    SetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_), code, false);
  }

  uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point to the end of the code + 1
     * (in the case that the last instruction is a call that isn't expected to
     * return). Thus, we check <= code + GetCodeSize().
     */
    return (code <= pc && pc <= code + GetCodeSize());
  }

  void AssertPcIsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint32_t GetOatCodeOffset() const;

  void SetOatCodeOffset(uint32_t code_offset);

  static MemberOffset GetCodeOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_);
  }

  const uint32_t* GetMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 1;
  }

  uint32_t GetPcToDexMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[2];
  }

  const uint32_t* GetPcToDexMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3;
  }

  uint32_t GetDexToPcMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[1] - map[2];
  }

  const uint32_t* GetDexToPcMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3 + map[2];
  }

  const uint32_t* GetMappingTableRaw() const {
    return GetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_), false);
  }
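
  // Layout of the raw mapping table as inferred from the accessors above (a
  // sketch derived from this header, not an authoritative format description):
  //
  //   raw[0]              : header word skipped by GetMappingTable()
  //   raw[1]              : combined entry count of both sub-tables
  //   raw[2]              : entry count of the PC-to-dex sub-table
  //   raw[3] ...          : PC-to-dex entries
  //   raw[3 + raw[2]] ... : dex-to-PC entries (raw[1] - raw[2] of them)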

  void SetMappingTable(const uint32_t* mapping_table) {
    SetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_),
                                 mapping_table, false);
  }

  uint32_t GetOatMappingTableOffset() const;

  void SetOatMappingTableOffset(uint32_t mapping_table_offset);

  // Callers should wrap the uint16_t* in a VmapTable instance for convenient access.
  const uint16_t* GetVmapTableRaw() const {
    return GetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), false);
  }
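
  // A direct-read sketch, per the vmap_table_ layout documented on the field
  // below ([0] holds the entry count, entries follow):
  //
  //   const uint16_t* vmap = method->GetVmapTableRaw();
  //   if (vmap != NULL) {
  //     for (uint16_t n = 1; n <= vmap[0]; ++n) {
  //       uint16_t dex_register = vmap[n];  // Dex register of the Nth promoted register.
  //     }
  //   }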

  void SetVmapTable(const uint16_t* vmap_table) {
    SetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), vmap_table, false);
  }

  uint32_t GetOatVmapTableOffset() const;

  void SetOatVmapTableOffset(uint32_t vmap_table_offset);

  const uint8_t* GetNativeGcMap() const {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_gc_map_), false);
  }
  void SetNativeGcMap(const uint8_t* data) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_gc_map_), data,
                                false);
  }

  // When building the oat, we need a convenient place to stuff the offset of the native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
  uint32_t GetOatNativeGcMapOffset() const;

  size_t GetFrameSizeInBytes() const {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_), false);
    DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    return result;
  }

  void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_),
               new_frame_size_in_bytes, false);
  }

  size_t GetReturnPcOffsetInBytes() const {
    return GetFrameSizeInBytes() - kPointerSize;
  }
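
  // For example, with a 64-byte frame and 4-byte pointers the return PC sits at
  // offset 60, i.e. in the top slot of the frame.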

  bool IsRegistered() const;

  void RegisterNative(Thread* self, const void* native_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_method_);
  }

  const void* GetNativeMethod() const {
    return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
  }

  void SetNativeMethod(const void*);

  // Native to managed invocation stub entry point.
  InvokeStub* GetInvokeStub() const {
    InvokeStub* result = GetFieldPtr<InvokeStub*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_), false);
    // TODO: DCHECK(result != NULL); should be ahead of time compiled
    return result;
  }

  void SetInvokeStub(InvokeStub* invoke_stub) {
    SetFieldPtr<InvokeStub*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_),
                             invoke_stub, false);
  }

  uint32_t GetInvokeStubSize() const {
    uintptr_t invoke_stub = reinterpret_cast<uintptr_t>(GetInvokeStub());
    if (invoke_stub == 0) {
      return 0;
    }
    // TODO: make this Thumb2 specific
    invoke_stub &= ~0x1;
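    // Masking with ~0x1 strips the Thumb-mode bit from the stub address; the
    // 32-bit word immediately preceding the stub is assumed to hold its size.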
    return reinterpret_cast<const uint32_t*>(invoke_stub)[-1];
  }

  uint32_t GetOatInvokeStubOffset() const;
  void SetOatInvokeStubOffset(uint32_t invoke_stub_offset);

  static MemberOffset GetInvokeStubOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_);
  }

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCoreSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), false);
  }

  void SetCoreSpillMask(uint32_t core_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), core_spill_mask, false);
  }

  uint32_t GetFpSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), false);
  }

  void SetFpSpillMask(uint32_t fp_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), fp_spill_mask, false);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore one that doesn't adhere to the
  // normal conventions of a managed-code method? Returns false for proxy methods.
  bool IsRuntimeMethod() const;

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() const;

  bool IsResolutionMethod() const;

  uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to the first corresponding safepoint PC.
  uintptr_t ToFirstNativeSafepointPc(const uint32_t dex_pc)
      const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc.
  uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClasses(Class* java_lang_reflect_Constructor, Class* java_lang_reflect_Method);

  static Class* GetConstructorClass() {
    return java_lang_reflect_Constructor_;
  }

  static Class* GetMethodClass() {
    return java_lang_reflect_Method_;
  }

  static void ResetClasses();

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  Class* declaring_class_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<AbstractMethod>* dex_cache_resolved_methods_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<Class>* dex_cache_resolved_types_;

  // Shortcut to the declaring_class_->dex_cache_ member, for fast access from compiled code.
  ObjectArray<String>* dex_cache_strings_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  // Compiled code associated with this method for callers from managed code.
  // May be compiled managed code or a bridge for invoking a native method.
  const void* code_;

  // Offset to the CodeItem.
  uint32_t code_item_offset_;

  // Architecture-dependent spill mask for core registers.
  uint32_t core_spill_mask_;

  // Architecture-dependent spill mask for floating-point registers.
  uint32_t fp_spill_mask_;

  // Total size in bytes of the frame.
  size_t frame_size_in_bytes_;

  // Garbage collection map of native PC offsets to reference bitmaps.
  const uint8_t* native_gc_map_;

  // Native invocation stub entry point for calling from native to managed code.
  InvokeStub* invoke_stub_;

  // Mapping from native pc to dex pc.
  const uint32_t* mapping_table_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t method_dex_index_;

  // For concrete virtual methods, this is the offset of the method in Class::vtable_.
  //
  // For abstract methods in an interface class, this is the offset of the method in
  // "iftable_->Get(n)->GetMethodArray()".
  //
  // For static and direct methods this is the index in the direct methods table.
  uint32_t method_index_;

  // The target native method registered with this method.
  const void* native_method_;

  // When a dex register is promoted into a physical register, the spill mask holds which physical
  // registers hold dex registers. The first promoted register's corresponding dex register is
  // vmap_table_[1], the Nth is vmap_table_[N]. vmap_table_[0] holds the length of the table.
  const uint16_t* vmap_table_;

  static Class* java_lang_reflect_Constructor_;
  static Class* java_lang_reflect_Method_;

  friend struct art::AbstractMethodOffsets;  // for verifying offset information
  friend struct art::ConstructorMethodOffsets;  // for verifying offset information
  friend struct art::MethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
};

class MANAGED Method : public AbstractMethod {
};

class MANAGED Constructor : public AbstractMethod {
};

class MANAGED AbstractMethodClass : public Class {
 private:
  Object* ORDER_BY_SIGNATURE_;
  friend struct art::MethodClassOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_SRC_MIRROR_METHOD_H_