/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MIRROR_METHOD_H_
#define ART_SRC_MIRROR_METHOD_H_

#include "class.h"
#include "invoke_type.h"
#include "locks.h"
#include "modifiers.h"
#include "object.h"

namespace art {

struct AbstractMethodOffsets;
struct ConstructorMethodOffsets;
union JValue;
struct MethodClassOffsets;
struct MethodOffsets;
class StringPiece;

namespace mirror {

class StaticStorageBase;

// C++ mirror of java.lang.reflect.Method and java.lang.reflect.Constructor
class MANAGED AbstractMethod : public Object {
 public:
  // A function that invokes a method with an array of its arguments.
  typedef void InvokeStub(const AbstractMethod* method,
                          Object* obj,
                          Thread* thread,
                          JValue* args,
                          JValue* result);

  Class* GetDeclaringClass() const;

  void SetDeclaringClass(Class* new_declaring_class) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, declaring_class_));
  }

  static MemberOffset CodeOffset() {
    return MemberOffset(OFFSETOF_MEMBER(AbstractMethod, code_));
  }

  uint32_t GetAccessFlags() const;

  void SetAccessFlags(uint32_t new_access_flags) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, access_flags_), new_access_flags, false);
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() const;

  // Returns true if the method is declared public.
  bool IsPublic() const {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() const {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() const {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor.
  bool IsConstructor() const {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() const {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    return (access_flags & (kAccStatic | kAccPrivate | kAccConstructor)) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() const {
    uint32_t synchronized = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized) != 0;
  }

  bool IsFinal() const {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

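  // Returns true if this is a "Miranda" method: a synthetic abstract method the runtime
  // adds to an abstract class to stand in for an interface method the class declares
  // but does not implement.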
  bool IsMiranda() const {
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  bool IsNative() const {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  bool IsAbstract() const {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() const {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

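  // Returns true if this method belongs to a runtime-generated proxy class
  // (java.lang.reflect.Proxy).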
  bool IsProxyMethod() const;

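  // Checks this method against the given invoke type; inferred from the name, this
  // reports whether the call would amount to an incompatible class change (e.g. an
  // invoke-virtual reaching a method that is now static).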
  bool CheckIncompatibleClassChange(InvokeType type) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() const;

  size_t GetVtableIndex() const {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_), new_method_index, false);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), false);
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_item_offset_), new_code_off, false);
  }

  // Number of 32-bit registers required to hold all of the method's arguments.
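  // For example, assuming the usual dex shorty convention (first character is the
  // return type; 'J'/'D' arguments are 64-bit), a shorty of "VJI" describes a
  // (long, int) signature needing 3 registers: 2 for the long plus 1 for the int.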
  static size_t NumArgRegisters(const StringPiece& shorty);

  uint32_t GetDexMethodIndex() const;

  void SetDexMethodIndex(uint32_t new_idx) {
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_dex_index_), new_idx, false);
  }

  ObjectArray<String>* GetDexCacheStrings() const;
  void SetDexCacheStrings(ObjectArray<String>* new_dex_cache_strings)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset DexCacheStringsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_strings_);
  }

  static MemberOffset DexCacheResolvedMethodsOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_methods_);
  }

  static MemberOffset DexCacheResolvedTypesOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, dex_cache_resolved_types_);
  }

  static MemberOffset DexCacheInitializedStaticStorageOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod,
                                   dex_cache_initialized_static_storage_);
  }

  ObjectArray<AbstractMethod>* GetDexCacheResolvedMethods() const;
  void SetDexCacheResolvedMethods(ObjectArray<AbstractMethod>* new_dex_cache_methods)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<Class>* GetDexCacheResolvedTypes() const;
  void SetDexCacheResolvedTypes(ObjectArray<Class>* new_dex_cache_types)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ObjectArray<StaticStorageBase>* GetDexCacheInitializedStaticStorage() const;
  void SetDexCacheInitializedStaticStorage(ObjectArray<StaticStorageBase>* new_value)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  AbstractMethod* FindOverriddenMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, char result_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  const void* GetCode() const {
    return GetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_), false);
  }

  void SetCode(const void* code) {
    SetFieldPtr<const void*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_), code, false);
  }

  uint32_t GetCodeSize() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t code = reinterpret_cast<uintptr_t>(GetCode());
    if (code == 0) {
      return pc == 0;
    }
    /*
     * During a stack walk, a return PC may point one past the end of the code
     * (when the last instruction is a call that is not expected to return).
     * Thus, we check pc <= code + GetCodeSize().
     */
    return (code <= pc && pc <= code + GetCodeSize());
  }

  void AssertPcIsWithinCode(uintptr_t pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  uint32_t GetOatCodeOffset() const;

  void SetOatCodeOffset(uint32_t code_offset);

  static MemberOffset GetCodeOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, code_);
  }

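  // Layout of the raw mapping table, as inferred from the accessors below: map[0] is a
  // header word skipped by GetMappingTable(), map[1] holds the total number of entries,
  // and map[2] holds the number of PC-to-dex entries. The PC-to-dex entries start at
  // map + 3, followed by the map[1] - map[2] dex-to-PC entries.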
  const uint32_t* GetMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 1;
  }

  uint32_t GetPcToDexMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[2];
  }

  const uint32_t* GetPcToDexMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3;
  }

  uint32_t GetDexToPcMappingTableLength() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return 0;
    }
    return map[1] - map[2];
  }

  const uint32_t* GetDexToPcMappingTable() const {
    const uint32_t* map = GetMappingTableRaw();
    if (map == NULL) {
      return map;
    }
    return map + 3 + map[2];
  }

  const uint32_t* GetMappingTableRaw() const {
    return GetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_), false);
  }

  void SetMappingTable(const uint32_t* mapping_table) {
    SetFieldPtr<const uint32_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, mapping_table_),
                                 mapping_table, false);
  }

  uint32_t GetOatMappingTableOffset() const;

  void SetOatMappingTableOffset(uint32_t mapping_table_offset);

  // Callers should wrap the uint16_t* in a VmapTable instance for convenient access.
  const uint16_t* GetVmapTableRaw() const {
    return GetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), false);
  }

  void SetVmapTable(const uint16_t* vmap_table) {
    SetFieldPtr<const uint16_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, vmap_table_), vmap_table, false);
  }

  uint32_t GetOatVmapTableOffset() const;

  void SetOatVmapTableOffset(uint32_t vmap_table_offset);

  const uint8_t* GetNativeGcMap() const {
    return GetFieldPtr<uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_gc_map_), false);
  }
  void SetNativeGcMap(const uint8_t* data) {
    SetFieldPtr<const uint8_t*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_gc_map_), data,
                                false);
  }

  // When building the oat file, we need a convenient place to stuff the offset of the
  // native GC map.
  void SetOatNativeGcMapOffset(uint32_t gc_map_offset);
  uint32_t GetOatNativeGcMapOffset() const;

  size_t GetFrameSizeInBytes() const {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    size_t result = GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_), false);
    DCHECK_LE(static_cast<size_t>(kStackAlignment), result);
    return result;
  }

  void SetFrameSizeInBytes(size_t new_frame_size_in_bytes) {
    DCHECK_EQ(sizeof(size_t), sizeof(uint32_t));
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, frame_size_in_bytes_),
               new_frame_size_in_bytes, false);
  }

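  // As the computation below implies, the return PC occupies the last pointer-sized
  // slot of the method's frame.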
  size_t GetReturnPcOffsetInBytes() const {
    return GetFrameSizeInBytes() - kPointerSize;
  }

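  // Returns true if a native implementation is currently registered for this method
  // (see RegisterNative and UnregisterNative below).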
  bool IsRegistered() const;

  void RegisterNative(Thread* self, const void* native_method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UnregisterNative(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static MemberOffset NativeMethodOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, native_method_);
  }

  const void* GetNativeMethod() const {
    return reinterpret_cast<const void*>(GetField32(NativeMethodOffset(), false));
  }

  void SetNativeMethod(const void*);

  // Native to managed invocation stub entry point.
  InvokeStub* GetInvokeStub() const {
    InvokeStub* result = GetFieldPtr<InvokeStub*>(
        OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_), false);
    // TODO: DCHECK(result != NULL); should be ahead of time compiled
    return result;
  }

  void SetInvokeStub(InvokeStub* invoke_stub) {
    SetFieldPtr<InvokeStub*>(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_),
                             invoke_stub, false);
  }

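  // Size of the invoke stub in bytes. This assumes (it is not spelled out in this
  // header) that the compiler stores the stub's code size in the 32-bit word
  // immediately preceding its first instruction, which is what the [-1] index reads;
  // bit 0 is cleared first because Thumb2 code pointers have it set.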
  uint32_t GetInvokeStubSize() const {
    uintptr_t invoke_stub = reinterpret_cast<uintptr_t>(GetInvokeStub());
    if (invoke_stub == 0) {
      return 0;
    }
    // TODO: make this Thumb2 specific
    invoke_stub &= ~0x1;
    return reinterpret_cast<const uint32_t*>(invoke_stub)[-1];
  }

  uint32_t GetOatInvokeStubOffset() const;
  void SetOatInvokeStubOffset(uint32_t invoke_stub_offset);

  static MemberOffset GetInvokeStubOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, invoke_stub_);
  }

  static MemberOffset GetMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(AbstractMethod, method_index_);
  }

  uint32_t GetCoreSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), false);
  }

  void SetCoreSpillMask(uint32_t core_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, core_spill_mask_), core_spill_mask, false);
  }

  uint32_t GetFpSpillMask() const {
    return GetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), false);
  }

  void SetFpSpillMask(uint32_t fp_spill_mask) {
    // Computed during compilation.
    SetField32(OFFSET_OF_OBJECT_MEMBER(AbstractMethod, fp_spill_mask_), fp_spill_mask, false);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore a method that does
  // not adhere to the normal conventions of managed code? Returns false for proxy
  // methods.
  bool IsRuntimeMethod() const;

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() const;

  bool IsResolutionMethod() const;

  uintptr_t NativePcOffset(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a native PC to a dex PC.
  uint32_t ToDexPc(const uintptr_t pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to a native PC.
  uintptr_t ToNativePc(const uint32_t dex_pc) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Converts a dex PC to the first corresponding safepoint PC.
  uintptr_t ToFirstNativeSafepointPc(const uint32_t dex_pc)
      const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc.
  uint32_t FindCatchBlock(Class* exception_type, uint32_t dex_pc) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SetClasses(Class* java_lang_reflect_Constructor, Class* java_lang_reflect_Method);

  static Class* GetConstructorClass() {
    return java_lang_reflect_Constructor_;
  }

  static Class* GetMethodClass() {
    return java_lang_reflect_Method_;
  }

  static void ResetClasses();

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  Class* declaring_class_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled-code access.
  ObjectArray<StaticStorageBase>* dex_cache_initialized_static_storage_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled-code access.
  ObjectArray<AbstractMethod>* dex_cache_resolved_methods_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled-code access.
  ObjectArray<Class>* dex_cache_resolved_types_;

  // Shortcut to declaring_class_->dex_cache_ member for fast compiled-code access.
  ObjectArray<String>* dex_cache_strings_;

  // Access flags; low 16 bits are defined by spec.
  uint32_t access_flags_;

  // Compiled code associated with this method for callers from managed code.
  // May be compiled managed code or a bridge for invoking a native method.
  const void* code_;

  // Offset to the CodeItem.
  uint32_t code_item_offset_;

  // Architecture-dependent register spill mask.
  uint32_t core_spill_mask_;

  // Architecture-dependent register spill mask.
  uint32_t fp_spill_mask_;

  // Total size in bytes of the frame.
  size_t frame_size_in_bytes_;

  // Garbage collection map of native PC offsets to reference bitmaps.
  const uint8_t* native_gc_map_;

  // Native invocation stub entry point for calling from native to managed code.
  InvokeStub* invoke_stub_;

  // Mapping from native PC to dex PC.
  const uint32_t* mapping_table_;

  // Index into the method_ids of the dex file associated with this method.
  uint32_t method_dex_index_;

  // For concrete virtual methods, this is the offset of the method in Class::vtable_.
  //
  // For abstract methods in an interface class, this is the offset of the method in
  // "iftable_->Get(n)->GetMethodArray()".
  //
  // For static and direct methods this is the index in the direct methods table.
  uint32_t method_index_;

  // The target native method registered with this method.
  const void* native_method_;

  // When a dex register is promoted into a physical register, the spill mask holds
  // which physical registers hold dex registers. The first promoted register's
  // corresponding dex register is vmap_table_[1], the Nth is vmap_table_[N];
  // vmap_table_[0] holds the length of the table.
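  // Illustrative example (values hypothetical): a table of {2, 5, 7} would say two
  // registers were promoted, holding dex registers 5 and 7 respectively.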
  const uint16_t* vmap_table_;

  static Class* java_lang_reflect_Constructor_;
  static Class* java_lang_reflect_Method_;

  friend struct art::AbstractMethodOffsets;  // for verifying offset information
  friend struct art::ConstructorMethodOffsets;  // for verifying offset information
  friend struct art::MethodOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethod);
};

class MANAGED Method : public AbstractMethod {
};

class MANAGED Constructor : public AbstractMethod {
};

class MANAGED AbstractMethodClass : public Class {
 private:
  Object* ORDER_BY_SIGNATURE_;
  friend struct art::MethodClassOffsets;  // for verifying offset information
  DISALLOW_IMPLICIT_CONSTRUCTORS(AbstractMethodClass);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_SRC_MIRROR_METHOD_H_