/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_code.h"

#include "art_method.h"
#include "art_method-inl.h"
#include "class_linker.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mapping_table.h"
#include "oat.h"
#include "runtime.h"
#include "utils.h"

namespace art {

// Converts a dex PC to a native PC. Returns UINTPTR_MAX if no mapping is
// found (or aborts when `abort_on_failure` is set).
uintptr_t ArtCode::ToNativeQuickPc(const uint32_t dex_pc,
                                   bool is_for_catch_handler,
                                   bool abort_on_failure)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  if (IsOptimized(sizeof(void*))) {
    // Optimized code does not have a mapping table. Search for the dex-to-pc
    // mapping in stack maps.
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();

    // All stack maps are stored in the same CodeItem section, safepoint stack
    // maps first, then catch stack maps. We use `is_for_catch_handler` to select
    // which of the two sets to search.
    StackMap stack_map =
        LIKELY(is_for_catch_handler) ? code_info.GetCatchStackMapForDexPc(dex_pc, encoding)
                                     : code_info.GetStackMapForDexPc(dex_pc, encoding);
    if (stack_map.IsValid()) {
      return reinterpret_cast<uintptr_t>(entry_point) + stack_map.GetNativePcOffset(encoding);
    }
  } else {
    MappingTable table((entry_point != nullptr) ? GetMappingTable(sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      DCHECK_EQ(dex_pc, 0U);
      return 0;   // Special no mapping/pc == 0 case.
    }
    // Assume the caller wants a dex-to-pc mapping so check here first.
    typedef MappingTable::DexToPcIterator It;
    for (It cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.DexPc() == dex_pc) {
        return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
      }
    }
    // Now check pc-to-dex mappings.
    typedef MappingTable::PcToDexIterator It2;
    for (It2 cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.DexPc() == dex_pc) {
        return reinterpret_cast<uintptr_t>(entry_point) + cur.NativePcOffset();
      }
    }
  }

  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find native offset for dex pc 0x" << std::hex << dex_pc
               << " in " << PrettyMethod(method_);
  }
  return UINTPTR_MAX;
}

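// Returns whether this method's code was generated by the optimizing compiler
// rather than the quick backend.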
bool ArtCode::IsOptimized(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_) {
  // Temporary solution for detecting if a method has been optimized: the
  // optimizing compiler does not create a GC map. Instead, the vmap table
  // contains the stack map (as in stack_map.h).
  return !method_->IsNative()
      && method_->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size) != nullptr
      && GetQuickOatEntryPoint(pointer_size) != nullptr
      && GetNativeGcMap(pointer_size) == nullptr;
}

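// Returns the CodeInfo (see stack_map.h) describing an optimized method's
// stack maps. The data lives vmap_table_offset_ bytes below the compiled
// code, as recorded in the OatQuickMethodHeader preceding the code.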
CodeInfo ArtCode::GetOptimizedCodeInfo() {
  DCHECK(IsOptimized(sizeof(void*)));
  const void* code_pointer = EntryPointToCodePointer(GetQuickOatEntryPoint(sizeof(void*)));
  DCHECK(code_pointer != nullptr);
  uint32_t offset =
      reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].vmap_table_offset_;
  const void* data =
      reinterpret_cast<const void*>(reinterpret_cast<const uint8_t*>(code_pointer) - offset);
  return CodeInfo(data);
}

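// Returns the offset of `pc` from this method's quick entry point. The CHECKs
// assert that the entry point is real compiled code (not the
// quick-to-interpreter bridge) and that instrumentation agrees on which code
// the method runs.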
uintptr_t ArtCode::NativeQuickPcOffset(const uintptr_t pc) {
  const void* quick_entry_point = GetQuickOatEntryPoint(sizeof(void*));
  CHECK_NE(quick_entry_point, GetQuickToInterpreterBridge());
  CHECK_EQ(quick_entry_point,
           Runtime::Current()->GetInstrumentation()->GetQuickCodeFor(method_, sizeof(void*)));
  return pc - reinterpret_cast<uintptr_t>(quick_entry_point);
}

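// Maps a native PC inside this method's quick code back to a dex PC, roughly
// the inverse of ToNativeQuickPc(). Returns DexFile::kDexNoIndex when no
// mapping exists, or aborts if `abort_on_failure` is set.
//
// Sketch of a hypothetical caller (for illustration only): a stack walker
// holding a return PC into this code would recover the dex instruction with
//   uint32_t dex_pc = art_code.ToDexPc(return_pc, /* abort_on_failure */ true);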
uint32_t ArtCode::ToDexPc(const uintptr_t pc, bool abort_on_failure) {
  const void* entry_point = GetQuickOatEntryPoint(sizeof(void*));
  uint32_t sought_offset = pc - reinterpret_cast<uintptr_t>(entry_point);
  if (IsOptimized(sizeof(void*))) {
    CodeInfo code_info = GetOptimizedCodeInfo();
    StackMapEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(sought_offset, encoding);
    if (stack_map.IsValid()) {
      return stack_map.GetDexPc(encoding);
    }
  } else {
    MappingTable table(entry_point != nullptr ? GetMappingTable(sizeof(void*)) : nullptr);
    if (table.TotalSize() == 0) {
      // NOTE: Special methods (see Mir2Lir::GenSpecialCase()) have an empty mapping
      // but they have no suspend checks and, consequently, we never call ToDexPc() for them.
      DCHECK(method_->IsNative() || method_->IsCalleeSaveMethod() || method_->IsProxyMethod())
          << PrettyMethod(method_);
      return DexFile::kDexNoIndex;   // Special no mapping case.
    }
    // Assume the caller wants a pc-to-dex mapping so check here first.
    typedef MappingTable::PcToDexIterator It;
    for (It cur = table.PcToDexBegin(), end = table.PcToDexEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
    // Now check dex-to-pc mappings.
    typedef MappingTable::DexToPcIterator It2;
    for (It2 cur = table.DexToPcBegin(), end = table.DexToPcEnd(); cur != end; ++cur) {
      if (cur.NativePcOffset() == sought_offset) {
        return cur.DexPc();
      }
    }
  }
  if (abort_on_failure) {
    LOG(FATAL) << "Failed to find Dex offset for PC offset "
               << reinterpret_cast<void*>(sought_offset)
               << " (PC " << reinterpret_cast<void*>(pc) << ", entry_point=" << entry_point
               << " current entry_point=" << GetQuickOatEntryPoint(sizeof(void*))
               << ") in " << PrettyMethod(method_);
  }
  return DexFile::kDexNoIndex;
}

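// Returns the native GC map stored gc_map_offset_ bytes below the compiled
// code, or nullptr if there is no code or no GC map. Optimized code never has
// a GC map, which is exactly what IsOptimized() keys on.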
const uint8_t* ArtCode::GetNativeGcMap(size_t pointer_size) {
  const void* code_pointer = EntryPointToCodePointer(GetQuickOatEntryPoint(pointer_size));
  if (code_pointer == nullptr) {
    return nullptr;
  }
  uint32_t offset =
      reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].gc_map_offset_;
  if (UNLIKELY(offset == 0u)) {
    return nullptr;
  }
  return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
}

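// Returns the vmap table stored vmap_table_offset_ bytes below the compiled
// code, or nullptr if there is no code or no table. For optimized code the
// same slot holds the stack map, so calling this on optimized code is a
// CHECK failure.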
const uint8_t* ArtCode::GetVmapTable(size_t pointer_size) {
  CHECK(!IsOptimized(pointer_size)) << "Unimplemented vmap table for optimized compiler";
  const void* code_pointer = EntryPointToCodePointer(GetQuickOatEntryPoint(pointer_size));
  if (code_pointer == nullptr) {
    return nullptr;
  }
  uint32_t offset =
      reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].vmap_table_offset_;
  if (UNLIKELY(offset == 0u)) {
    return nullptr;
  }
  return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
}

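// Returns the PC <-> dex PC mapping table of quick-compiled code, stored
// mapping_table_offset_ bytes below the code, or nullptr if there is no code
// or no table.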
const uint8_t* ArtCode::GetMappingTable(size_t pointer_size) {
  const void* code_pointer = EntryPointToCodePointer(GetQuickOatEntryPoint(pointer_size));
  if (code_pointer == nullptr) {
    return nullptr;
  }
  uint32_t offset =
      reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].mapping_table_offset_;
  if (UNLIKELY(offset == 0u)) {
    return nullptr;
  }
  return reinterpret_cast<const uint8_t*>(code_pointer) - offset;
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: This does _not_ include "this" for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  // Skip index 0: the first shorty character is the return type.
  for (uint32_t i = 1; i < shorty_len; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

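// Computes the frame layout (frame size and core/FP spill masks) for this
// method. Methods without compiled code of their own (abstract, runtime,
// proxy, generic JNI) derive their layout from the runtime's callee-save
// conventions; everything else reads it from the OatQuickMethodHeader that
// precedes the compiled code.
//
// Worked example for the generic JNI branch below (illustrative numbers
// only): with 8-byte pointers, a 176-byte kRefsAndArgs callee-save frame and
// a native instance method taking two reference arguments (handle_refs == 3),
// the frame size is
//   RoundUp(176 - 8 + 8 + HandleScope::SizeOf(3), kStackAlignment).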
QuickMethodFrameInfo ArtCode::GetQuickFrameInfo() {
  Runtime* runtime = Runtime::Current();

  if (UNLIKELY(method_->IsAbstract())) {
    return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (UNLIKELY(method_->IsRuntimeMethod())) {
    return runtime->GetRuntimeMethodFrameInfo(method_);
  }

  // Proxy methods need special handling in the direct-method case (the only
  // direct method is the constructor). The constructor is cloned from the
  // original java.lang.reflect.Proxy class together with its code, so it runs
  // as a regular quick-compiled method without any stubs and its frame info
  // can be read from the method header as usual. However, if instrumentation
  // stubs are installed, instrumentation->GetQuickCodeFor() returns
  // artQuickProxyInvokeHandler instead of an oat code pointer, so the proxy
  // case has to be handled before querying the instrumentation below.
  if (UNLIKELY(method_->IsProxyMethod())) {
    if (method_->IsDirect()) {
      CHECK(method_->IsConstructor());
      const void* code_pointer =
          EntryPointToCodePointer(method_->GetEntryPointFromQuickCompiledCode());
      return reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].frame_info_;
    } else {
      return runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);
    }
  }

  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method_, sizeof(void*));
  ClassLinker* class_linker = runtime->GetClassLinker();
  // On failure, instead of null we get the quick-generic-jni-trampoline for a
  // native method (indicating generic JNI), or the quick-to-interpreter-bridge
  // (but not the trampoline) for non-native methods. We really shouldn't see a
  // failure for non-native methods here.
  DCHECK(!class_linker->IsQuickToInterpreterBridge(entry_point));

  if (class_linker->IsQuickGenericJniStub(entry_point)) {
    // Generic JNI frame.
    DCHECK(method_->IsNative());
    // +1 slot for the receiver ("this") or, for static natives, the declaring class.
    uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(method_) + 1;
    size_t scope_size = HandleScope::SizeOf(handle_refs);
    QuickMethodFrameInfo callee_info = runtime->GetCalleeSaveMethodFrameInfo(Runtime::kRefsAndArgs);

    // Callee saves + handle scope + method ref + alignment.
    // Note: -sizeof(void*) since the callee-save frame already stores a whole method pointer.
    size_t frame_size = RoundUp(callee_info.FrameSizeInBytes() - sizeof(void*) +
                                sizeof(ArtMethod*) + scope_size, kStackAlignment);
    return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
  }

  const void* code_pointer = EntryPointToCodePointer(entry_point);
  return reinterpret_cast<const OatQuickMethodHeader*>(code_pointer)[-1].frame_info_;
}

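// CHECK-fails unless `pc` lies within this method's quick code. Methods with
// no quick code of their own (native, runtime, proxy), instrumentation and
// resolution stubs, and freshly JIT-compiled code are exempted from the check.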
void ArtCode::AssertPcIsWithinQuickCode(uintptr_t pc) {
  if (method_->IsNative() || method_->IsRuntimeMethod() || method_->IsProxyMethod()) {
    return;
  }
  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }
  const void* code = method_->GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint()) {
    return;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }
  // The JIT may have compiled the method right after the
  // IsQuickToInterpreterBridge check above, so accept code that lives in the
  // JIT code cache as well.
  jit::Jit* const jit = Runtime::Current()->GetJit();
  if (jit != nullptr &&
      jit->GetCodeCache()->ContainsCodePtr(reinterpret_cast<const void*>(code))) {
    return;
  }

  uint32_t code_size = reinterpret_cast<const OatQuickMethodHeader*>(
      EntryPointToCodePointer(code))[-1].code_size_;
  uintptr_t code_start = reinterpret_cast<uintptr_t>(code);
  CHECK(code_start <= pc && pc <= (code_start + code_size))
      << PrettyMethod(method_)
      << " pc=" << std::hex << pc
      << " code=" << code
      << " size=" << code_size;
}

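// Non-asserting variant: returns whether `pc` falls within this method's
// quick code. A method without quick code only "contains" pc == 0.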
bool ArtCode::PcIsWithinQuickCode(uintptr_t pc) {
  /*
   * During a stack walk, a return PC may point past-the-end of the code
   * in the case that the last instruction is a call that isn't expected to
   * return. Thus, we check <= code + GetCodeSize().
   *
   * NOTE: For Thumb both pc and code are offset by 1 indicating the Thumb state.
   */
  uintptr_t code = reinterpret_cast<uintptr_t>(EntryPointToCodePointer(
      method_->GetEntryPointFromQuickCompiledCode()));
  if (code == 0) {
    return pc == 0;
  }
  uintptr_t code_size = reinterpret_cast<const OatQuickMethodHeader*>(code)[-1].code_size_;
  return code <= pc && pc <= (code + code_size);
}

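// Returns the entry point of this method's quick oat code, or nullptr when
// there is no real compiled code to point at: abstract, runtime and proxy
// methods, and methods currently resolved to the interpreter bridge or the
// generic JNI stub.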
const void* ArtCode::GetQuickOatEntryPoint(size_t pointer_size) {
  if (method_->IsAbstract() || method_->IsRuntimeMethod() || method_->IsProxyMethod()) {
    return nullptr;
  }
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  const void* code = runtime->GetInstrumentation()->GetQuickCodeFor(method_, pointer_size);
  // On failure, instead of null we get the quick-generic-jni-trampoline for a
  // native method (indicating generic JNI), or the quick-to-interpreter-bridge
  // (but not the trampoline) for non-native methods.
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickGenericJniStub(code)) {
    return nullptr;
  }
  return code;
}

}  // namespace art