/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

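// Sets the method's quick (and, when built with the portable compiler, portable) entrypoints, and
// points its interpreter entrypoint at the interpreter-to-interpreter bridge when the installed
// code is an interpreter bridge or trampoline, or at the interpreter-to-compiled-code bridge
// otherwise.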
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
#if defined(ART_USE_PORTABLE_COMPILER)
  method->SetEntryPointFromPortableCompiledCode(portable_code);
#endif
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
#if defined(ART_USE_PORTABLE_COMPILER)
          DCHECK(portable_code == GetPortableToInterpreterBridge());
#endif
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
#if defined(ART_USE_PORTABLE_COMPILER)
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
#endif
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

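// Chooses the entrypoints a method should use for the current instrumentation level: its oat code,
// the resolution trampoline, the instrumentation entry stub, or the quick-to-interpreter bridge.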
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
#if !defined(ART_USE_PORTABLE_COMPILER)
  new_portable_code = nullptr;
#endif
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#endif
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#endif
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
#endif
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#endif
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods' code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
#if defined(ART_USE_PORTABLE_COMPILER)
          new_portable_code = GetPortableToInterpreterBridge();
#endif
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
#if defined(ART_USE_PORTABLE_COMPILER)
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#endif
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
#if defined(ART_USE_PORTABLE_COMPILER)
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
#endif
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so the method enter event must already have been
          // reported. However, we need to push a DEX pc into the dex_pcs_ list to match the size
          // of the instrumentation stack. Since we won't report method entry here, we can safely
          // push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which already has the instrumentation exit stub installed.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to prevent posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to prevent posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

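// Note: the DEX pc, field read/write and exception-caught listener lists are copied on every
// modification; event dispatch below takes a shared_ptr reference to the current copy, so removing
// a listener does not invalidate a list that is concurrently being iterated.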
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    if (have_method_entry_listeners_) {
      method_entry_listeners_.remove(listener);
      have_method_entry_listeners_ = !method_entry_listeners_.empty();
    }
  }
  if ((events & kMethodExited) != 0) {
    if (have_method_exit_listeners_) {
      method_exit_listeners_.remove(listener);
      have_method_exit_listeners_ = !method_exit_listeners_.empty();
    }
  }
  if ((events & kMethodUnwind) != 0) {
    if (have_method_unwind_listeners_) {
      method_unwind_listeners_.remove(listener);
      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
    }
  }
  if ((events & kDexPcMoved) != 0) {
    if (have_dex_pc_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
      modified->remove(listener);
      have_dex_pc_listeners_ = !modified->empty();
      if (have_dex_pc_listeners_) {
        dex_pc_listeners_.reset(modified);
      } else {
        dex_pc_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldRead) != 0) {
    if (have_field_read_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
      modified->remove(listener);
      have_field_read_listeners_ = !modified->empty();
      if (have_field_read_listeners_) {
        field_read_listeners_.reset(modified);
      } else {
        field_read_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldWritten) != 0) {
    if (have_field_write_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
      modified->remove(listener);
      have_field_write_listeners_ = !modified->empty();
      if (have_field_write_listeners_) {
        field_write_listeners_.reset(modified);
      } else {
        field_write_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kExceptionCaught) != 0) {
    if (have_exception_caught_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
      modified->remove(listener);
      have_exception_caught_listeners_ = !modified->empty();
      if (have_exception_caught_listeners_) {
        exception_caught_listeners_.reset(modified);
      } else {
        exception_caught_listeners_.reset();
        delete modified;
      }
    }
  }
  UpdateInterpreterHandlerTable();
}

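// Moves the runtime to the required instrumentation level: 0 = run compiled code directly,
// 1 = route calls through the method entry/exit stubs, 2 = force everything through the
// interpreter. Raising the level also stamps the instrumentation exit pc onto the existing stack
// frames of every thread.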
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

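// Swaps every thread's allocation entrypoints between the instrumented and uninstrumented
// versions. Unless the caller has already suspended all threads (suspended == true), the world is
// suspended and resumed around the swap.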
void Instrumentation::SetEntrypointsInstrumented(bool instrumented, bool suspended) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (suspended) {
    Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  }
  if (runtime->IsStarted() && !suspended) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted() && !suspended) {
    tl->ResumeAll();
  }
}

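// Allocation entrypoint instrumentation is reference counted so multiple independent users can
// request it; only the 0 -> 1 and 1 -> 0 transitions of the counter actually swap the entrypoints.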
void Instrumentation::InstrumentQuickAllocEntryPoints(bool suspended) {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true, suspended);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints(bool suspended) {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false, suspended);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

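// Called when a method's code pointer changes: the new code is installed only when it does not
// conflict with the currently required instrumentation, otherwise the appropriate bridge or
// instrumentation stub is installed instead.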
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
#if defined(ART_USE_PORTABLE_COMPILER)
      new_portable_code = GetPortableToInterpreterBridge();
#else
      new_portable_code = portable_code;
#endif
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
#if defined(ART_USE_PORTABLE_COMPILER)
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
#endif
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
#if defined(ART_USE_PORTABLE_COMPILER)
        new_portable_code = GetPortableToInterpreterBridge();
#else
        new_portable_code = portable_code;
#endif
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

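// The helpers below maintain deoptimized_methods_, a multimap from a method's identity hash code
// to a GC root holding the method; callers are expected to hold deoptimized_methods_lock_.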
bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

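// Deoptimizes a single method: records it in deoptimized_methods_ and, unless everything already
// runs in the interpreter, reroutes it through the instrumentation entry point and installs exit
// stubs on the stacks of all threads.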
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(),
#if defined(ART_USE_PORTABLE_COMPILER)
                      GetPortableToInterpreterBridge(),
#else
                      nullptr,
#endif
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
#if defined(ART_USE_PORTABLE_COMPILER)
                        class_linker->GetPortableResolutionTrampoline(),
#else
                        nullptr,
#endif
                        false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
#if defined(ART_USE_PORTABLE_COMPILER)
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
#else
      const void* portable_code = nullptr;
#endif
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

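// Method tracing either deoptimizes everything (accurate entry/exit events) or only installs the
// entry/exit stubs, depending on kDeoptimizeForAccurateMethodEntryExitListeners.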
void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

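// Returns the method's actual quick code, bypassing any trampoline, bridge or instrumentation
// stub that may currently be installed as its entrypoint.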
Ian Rogersef7d42f2014-01-06 12:55:46 -0800914const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
Ian Rogers62d6c772013-02-27 08:32:07 -0800915 Runtime* runtime = Runtime::Current();
916 if (LIKELY(!instrumentation_stubs_installed_)) {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800917 const void* code = method->GetEntryPointFromQuickCompiledCode();
Vladimir Marko8a630572014-04-09 18:45:35 +0100918 DCHECK(code != nullptr);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700919 ClassLinker* class_linker = runtime->GetClassLinker();
920 if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
921 LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
Vladimir Marko8a630572014-04-09 18:45:35 +0100922 LIKELY(code != GetQuickToInterpreterBridge())) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800923 return code;
924 }
925 }
Ian Rogersef7d42f2014-01-06 12:55:46 -0800926 return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
jeffhao725a9572012-11-13 18:20:12 -0800927}
928
Ian Rogers62d6c772013-02-27 08:32:07 -0800929void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800930 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800931 uint32_t dex_pc) const {
Mathieu Chartier02e25112013-08-14 16:14:24 -0700932 auto it = method_entry_listeners_.begin();
Jeff Hao65d15d92013-07-16 16:39:33 -0700933 bool is_end = (it == method_entry_listeners_.end());
934 // Implemented this way to prevent problems caused by modification of the list while iterating.
935 while (!is_end) {
936 InstrumentationListener* cur = *it;
937 ++it;
938 is_end = (it == method_entry_listeners_.end());
939 cur->MethodEntered(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800940 }
941}
942
943void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800944 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800945 uint32_t dex_pc, const JValue& return_value) const {
Mathieu Chartier02e25112013-08-14 16:14:24 -0700946 auto it = method_exit_listeners_.begin();
Jeff Hao65d15d92013-07-16 16:39:33 -0700947 bool is_end = (it == method_exit_listeners_.end());
948 // Implemented this way to prevent problems caused by modification of the list while iterating.
949 while (!is_end) {
950 InstrumentationListener* cur = *it;
951 ++it;
952 is_end = (it == method_exit_listeners_.end());
953 cur->MethodExited(thread, this_object, method, dex_pc, return_value);
Ian Rogers62d6c772013-02-27 08:32:07 -0800954 }
955}
956
957void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800958 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800959 uint32_t dex_pc) const {
960 if (have_method_unwind_listeners_) {
Mathieu Chartier02e25112013-08-14 16:14:24 -0700961 for (InstrumentationListener* listener : method_unwind_listeners_) {
Sebastien Hertz51db44a2013-11-19 10:00:29 +0100962 listener->MethodUnwind(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800963 }
964 }
965}
966
967void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800968 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800969 uint32_t dex_pc) const {
Daniel Mihalyi96add972014-08-18 18:45:31 +0200970 if (HasDexPcListeners()) {
971 std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
972 for (InstrumentationListener* listener : *original.get()) {
973 listener->DexPcMoved(thread, this_object, method, dex_pc);
974 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800975 }
976}
977
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200978void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
979 mirror::ArtMethod* method, uint32_t dex_pc,
980 mirror::ArtField* field) const {
Daniel Mihalyi96add972014-08-18 18:45:31 +0200981 if (HasFieldReadListeners()) {
982 std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
983 for (InstrumentationListener* listener : *original.get()) {
984 listener->FieldRead(thread, this_object, method, dex_pc, field);
985 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200986 }
987}
988
989void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
990 mirror::ArtMethod* method, uint32_t dex_pc,
991 mirror::ArtField* field, const JValue& field_value) const {
Daniel Mihalyi96add972014-08-18 18:45:31 +0200992 if (HasFieldWriteListeners()) {
993 std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
994 for (InstrumentationListener* listener : *original.get()) {
995 listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
996 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200997 }
998}
999
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
                                exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

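// Consistency check: the frame id recorded in an instrumentation frame must match the depth
// obtained by walking the stack (adjusted by |delta|); on mismatch the stack is dumped and the
// CHECK aborts the runtime.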
static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

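// Entry hook for instrumented code: saves the real return address |lr| and the current frame id
// in an InstrumentationStackFrame on the thread-local instrumentation stack, then raises the
// method-enter event unless the method is being entered through the interpreter.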
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

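// Exit hook matching PushInstrumentationStackFrame: pops the top instrumentation frame, rebuilds
// the return value from the raw GPR/FPR results using the method's return shorty, raises the
// method-exit event, and either resumes at the saved return pc or redirects to the quick
// deoptimization entry point when the caller must continue in the interpreter.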
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  // return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

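// Pops the top instrumentation frame while the stack is being unwound or deoptimized; only the
// unwind case notifies listeners, via MethodUnwindEvent.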
void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    // return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

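// Reports the ArtMethod roots held in deoptimized_methods_ to the garbage collector so the
// methods forced into the interpreter stay valid while deoptimization is active.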
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  for (auto pair : deoptimized_methods_) {
    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
  }
}

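// Human-readable summary of one instrumentation frame: frame id, method, saved return pc and the
// receiver object.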
std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art