/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "object_utils.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

extern void SetQuickAllocEntryPointsInstrumented(bool instrumented);

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

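// Sets the method's portable and quick entry points and keeps the rest of its state in sync:
// the portable-compiled flag is updated to match have_portable_code, and for non-resolution
// methods the interpreter entry point is set to the interpreter-to-interpreter bridge when the
// method will run in the interpreter (or sits on the resolution trampoline while interpret-only
// is forced), and to the interpreter-to-compiled-code bridge otherwise.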
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

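// Selects the entry points a single method should use for the current instrumentation state:
// its compiled code when no stubs are required, the instrumentation entry stub when entry/exit
// events are needed, the interpreter bridges when the method must be interpreted or has been
// deoptimized, or the resolution trampolines for static methods of uninitialized classes.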
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite the interpreter bridge, to avoid posting method entry/exit events twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing ones.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
         end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
261
Ian Rogers62d6c772013-02-27 08:32:07 -0800262// Removes the instrumentation exit pc as the return PC for every quick frame.
263static void InstrumentationRestoreStack(Thread* thread, void* arg)
Ian Rogers306057f2012-11-26 12:45:53 -0800264 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
265 struct RestoreStackVisitor : public StackVisitor {
Ian Rogers62d6c772013-02-27 08:32:07 -0800266 RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
267 Instrumentation* instrumentation)
268 : StackVisitor(thread, NULL), thread_(thread),
269 instrumentation_exit_pc_(instrumentation_exit_pc),
270 instrumentation_(instrumentation),
271 instrumentation_stack_(thread->GetInstrumentationStack()),
272 frames_removed_(0) {}
Ian Rogers306057f2012-11-26 12:45:53 -0800273
274 virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800275 if (instrumentation_stack_->size() == 0) {
jeffhao725a9572012-11-13 18:20:12 -0800276 return false; // Stop.
277 }
Brian Carlstromea46f952013-07-30 01:26:50 -0700278 mirror::ArtMethod* m = GetMethod();
Ian Rogers62d6c772013-02-27 08:32:07 -0800279 if (GetCurrentQuickFrame() == NULL) {
280 if (kVerboseInstrumentation) {
281 LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
282 }
283 return true; // Ignore shadow frames.
284 }
Ian Rogers306057f2012-11-26 12:45:53 -0800285 if (m == NULL) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800286 if (kVerboseInstrumentation) {
287 LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
288 }
Ian Rogers306057f2012-11-26 12:45:53 -0800289 return true; // Ignore upcalls.
290 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800291 bool removed_stub = false;
292 // TODO: make this search more efficient?
Mathieu Chartier02e25112013-08-14 16:14:24 -0700293 for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800294 if (instrumentation_frame.frame_id_ == GetFrameId()) {
295 if (kVerboseInstrumentation) {
296 LOG(INFO) << " Removing exit stub in " << DescribeLocation();
297 }
Jeff Hao9a916d32013-06-27 18:45:37 -0700298 if (instrumentation_frame.interpreter_entry_) {
299 CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
300 } else {
301 CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
302 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800303 SetReturnPc(instrumentation_frame.return_pc_);
Sebastien Hertz11d40c22014-02-19 18:00:17 +0100304 if (!instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100305 // Create the method exit events. As the methods didn't really exit the result is 0.
306 // We only do this if no debugger is attached to prevent from posting events twice.
307 instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
308 GetDexPc(), JValue());
309 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800310 frames_removed_++;
311 removed_stub = true;
312 break;
313 }
314 }
315 if (!removed_stub) {
316 if (kVerboseInstrumentation) {
317 LOG(INFO) << " No exit stub in " << DescribeLocation();
Ian Rogers306057f2012-11-26 12:45:53 -0800318 }
jeffhao725a9572012-11-13 18:20:12 -0800319 }
320 return true; // Continue.
321 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800322 Thread* const thread_;
Ian Rogers306057f2012-11-26 12:45:53 -0800323 const uintptr_t instrumentation_exit_pc_;
Ian Rogers62d6c772013-02-27 08:32:07 -0800324 Instrumentation* const instrumentation_;
325 std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
326 size_t frames_removed_;
jeffhao725a9572012-11-13 18:20:12 -0800327 };
Ian Rogers62d6c772013-02-27 08:32:07 -0800328 if (kVerboseInstrumentation) {
329 std::string thread_name;
330 thread->GetThreadName(thread_name);
331 LOG(INFO) << "Removing exit stubs in " << thread_name;
332 }
333 std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
334 if (stack->size() > 0) {
335 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
Ian Rogers848871b2013-08-05 10:56:33 -0700336 uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
Ian Rogers62d6c772013-02-27 08:32:07 -0800337 RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
338 visitor.WalkStack(true);
339 CHECK_EQ(visitor.frames_removed_, stack->size());
340 while (stack->size() > 0) {
341 stack->pop_front();
342 }
jeffhao725a9572012-11-13 18:20:12 -0800343 }
344}
345
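// Listener registration. AddListener and RemoveListener must be called with the mutator lock
// held exclusively; both end by refreshing the interpreter handler table so the interpreter can
// switch to its instrumentation-aware handlers when listeners are present.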
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

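// Moves the runtime between three instrumentation levels: 0 (no stubs), 1 (method entry/exit
// stubs) and 2 (everything runs in the interpreter). Raising the level installs the chosen stubs
// on every class and pushes instrumentation frames on every thread's stack; dropping back to
// level 0 removes the stubs and, if no method is individually deoptimized, restores the threads'
// stacks as well.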
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    if (deoptimized_methods_.empty()) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

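// Allocation entry point instrumentation is reference counted: the counter is incremented when
// instrumenting and decremented when uninstrumenting, and the entry points are only switched
// (with all threads suspended) on the 0 -> 1 and 1 -> 0 transitions.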
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    // Instrumentation wasn't enabled so enable it.
    SetQuickAllocEntryPointsInstrumented(true);
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    ResetQuickAllocEntryPoints();
    tl->ResumeAll();
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    SetQuickAllocEntryPointsInstrumented(false);
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    ResetQuickAllocEntryPoints();
    tl->ResumeAll();
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

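// Chooses the code a method should actually run when its compiled code is being updated, taking
// the current instrumentation into account: the interpreter bridges for deoptimized or
// interpreter-only methods, the instrumentation entry stub when entry/exit stubs are installed,
// and otherwise the provided code (trampolines and bridges are passed through unchanged).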
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
               quick_code == GetQuickToInterpreterBridge()) {
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

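// Forces a single method to run in the interpreter by pointing its entry points at the
// interpreter bridges, and makes sure instrumentation exit stubs are installed on every thread's
// stack so existing quick frames can be deoptimized on return.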
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair = deoptimized_methods_.insert(method);
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

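// Reverts a single method to its regular entry points: the resolution trampolines for static
// methods of uninitialized classes, its oat code otherwise. Once no individually deoptimized
// method remains (and full deoptimization is not active), the threads' stacks are restored and
// the instrumentation stubs are marked as uninstalled.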
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  auto it = deoptimized_methods_.find(method);
  CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method) << " is not deoptimized";
  deoptimized_methods_.erase(it);

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (deoptimized_methods_.empty()) {
      MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
  DCHECK(method != nullptr);
  return deoptimized_methods_.count(method);
}

void Instrumentation::EnableDeoptimization() {
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (!deoptimized_methods_.empty()) {
    auto it_begin = deoptimized_methods_.begin();
    Undeoptimize(*it_begin);
  }
  CHECK(deoptimized_methods_.empty());
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return deoptimization_enabled_ || interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

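// Returns the quick code the method would run without instrumentation: the entry point stored in
// the method when no stubs are installed and it is neither a trampoline nor the interpreter
// bridge, otherwise the oat code known to the class linker.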
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != NULL);
    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
               code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
               << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

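// Pushes a new frame on the thread's instrumentation stack, recording the real return address
// (lr) that the instrumentation exit stub will later restore, and reports a method-enter event.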
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  MethodEnterEvent(self, this_object, method, 0);
}

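// Counterpart of PushInstrumentationStackFrame, used when returning through the instrumentation
// exit stub. Pops the top instrumentation frame, rebuilds the callee's return value from the raw
// gpr/fpr results and reports a method-exit event. Returns the original return pc, or the
// deoptimization entry point packed with it when the caller must continue in the interpreter.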
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  // return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
           (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}

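// Pops the top instrumentation frame while the stack is being unwound, either for deoptimization
// or for exception propagation; only the latter reports a method-unwind event.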
void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    // return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
     << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art