/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "object_utils.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

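// Installs the appropriate entrypoints on every direct and virtual method declared by the class.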
bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

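// Writes the quick and portable entrypoints (and the portable-compiled flag) into the method,
// then selects the matching interpreter bridge: interpreter-to-interpreter when the method will
// run in the interpreter, interpreter-to-compiled-code otherwise.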
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

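// Picks the entrypoints a single method should use for the current instrumentation level:
// compiled oat code, the resolution trampoline, the instrumentation entry stub or the
// interpreter bridge.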
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite the interpreter bridge, to avoid posting method entry/exit events twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing ones.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We have reached a frame whose instrumentation exit stub was already installed.
        // Instrumentation should already have been installed on the previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert the frame before the old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods did not really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

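// Registers the listener for each requested event kind, e.g. (illustrative only)
// AddListener(listener, kMethodEntered | kMethodExited), and refreshes the corresponding
// have_*_listeners_ flags and the interpreter handler table.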
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    field_read_listeners_.push_back(listener);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    field_write_listeners_.push_back(listener);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

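// Unregisters the listener from each requested event kind, recomputing the have_*_listeners_
// flags and the interpreter handler table.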
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kFieldRead) != 0) {
    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
                              listener) != field_read_listeners_.end();
    if (contains) {
      field_read_listeners_.remove(listener);
    }
    have_field_read_listeners_ = field_read_listeners_.size() > 0;
  }
  if ((events & kFieldWritten) != 0) {
    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
                              listener) != field_write_listeners_.end();
    if (contains) {
      field_write_listeners_.remove(listener);
    }
    have_field_write_listeners_ = field_write_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

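// Reconfigures instrumentation to one of three levels: 0 (no stubs), 1 (method entry/exit stubs)
// or 2 (everything runs in the interpreter), updating class entrypoints and per-thread
// instrumentation stacks as needed.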
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = deoptimized_methods_.empty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

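// Switches the quick allocation entrypoints between their instrumented and uninstrumented
// versions, suspending all threads while they are swapped.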
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

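// Installs new code for a method while respecting any installed stubs: the interpreter bridge or
// the instrumentation entry stub is kept in place when instrumentation requires it, otherwise the
// provided code is used.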
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
               quick_code == GetQuickToInterpreterBridge()) {
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

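// Forces the method to be executed by the interpreter: records it as deoptimized, points its
// entrypoints at the interpreter bridges and installs instrumentation exit stubs on every
// thread's stack.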
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    pair = deoptimized_methods_.insert(method);
  }
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

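// Reverts a previous Deoptimize() call: forgets the method and restores its original code (or the
// resolution trampoline for uninitialized static methods), removing the exit stubs once no
// deoptimized method remains.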
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    auto it = deoptimized_methods_.find(method);
    CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    deoptimized_methods_.erase(it);
    empty = deoptimized_methods_.empty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  DCHECK(method != nullptr);
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

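// Undoes full deoptimization and undeoptimizes every individually deoptimized method before
// turning deoptimization support off.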
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

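// Returns the quick code the method would execute if no instrumentation stubs were installed:
// the method's current entrypoint when it is real code, otherwise the oat code looked up through
// the class linker.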
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != NULL);
    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
               code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  if (have_field_read_listeners_) {
    // TODO: same comment as in DexPcMovedEventImpl.
    std::list<InstrumentationListener*> copy(field_read_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->FieldRead(thread, this_object, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  if (have_field_write_listeners_) {
    // TODO: same comment as in DexPcMovedEventImpl.
    std::list<InstrumentationListener*> copy(field_write_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
    }
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
               << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

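// Records an instrumentation frame (receiver, method, return address and frame id) on the
// thread's instrumentation stack and reports the method-enter event.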
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  MethodEnterEvent(self, this_object, method, 0);
}

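// Pops the top instrumentation frame when an instrumented method returns: reports the method-exit
// event with the typed return value, then either resumes at the saved return pc or, if the caller
// must be deoptimized, returns the deoptimization entrypoint in the low 32 bits and the return pc
// in the high 32 bits.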
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}

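// Pops the top instrumentation frame while an exception unwinds through it, reporting a
// method-unwind event unless we are deoptimizing.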
void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

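// Reports the deoptimized methods as GC roots; rebuilds the set in case the callback moved any of
// them.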
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (deoptimized_methods_.empty()) {
    return;
  }
  std::set<mirror::ArtMethod*> new_deoptimized_methods;
  for (mirror::ArtMethod* method : deoptimized_methods_) {
    DCHECK(method != nullptr);
    callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
    new_deoptimized_methods.insert(method);
  }
  deoptimized_methods_ = new_deoptimized_methods;
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art