/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

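// Sets the method's quick and portable entry points and keeps the interpreter entry point in
// sync: methods whose code is (or resolves to) the interpreter bridge get the
// interpreter-to-interpreter bridge, all others get the interpreter-to-compiled-code bridge.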
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

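// Installs the entry points a single method needs for the current instrumentation level: its
// compiled code when nothing is installed, the instrumentation entry stub when entry/exit stubs
// are required, or the interpreter bridge when the method must be interpreted.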
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so the method enter event must already have been
          // reported. However we need to push a DEX pc into the dex_pcs_ list to match the size
          // of the instrumentation stack. Since we won't report method entry here, we can safely
          // push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which already has the instrumentation exit stub installed.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to prevent posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to prevent posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

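// Registers the listener for each event selected in the events bit mask, updates the
// have_*_listeners_ flags checked on the fast path and refreshes the interpreter handler table.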
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    field_read_listeners_.push_back(listener);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    field_write_listeners_.push_back(listener);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

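// Unregisters the listener from each event selected in the events bit mask; every
// have_*_listeners_ flag is recomputed from the listeners that remain.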
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kFieldRead) != 0) {
    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
                              listener) != field_read_listeners_.end();
    if (contains) {
      field_read_listeners_.remove(listener);
    }
    have_field_read_listeners_ = field_read_listeners_.size() > 0;
  }
  if ((events & kFieldWritten) != 0) {
    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
                              listener) != field_write_listeners_.end();
    if (contains) {
      field_write_listeners_.remove(listener);
    }
    have_field_write_listeners_ = field_write_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

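// Switches the runtime between the three instrumentation levels: 0 (nothing installed),
// 1 (method entry/exit stubs) and 2 (everything runs in the interpreter). Raising the level
// installs stubs and instrumentation frames on every thread; lowering it restores the original
// code and, when no method is deoptimized, each thread's stack.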
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

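// Swaps the allocation entry points of every thread while all threads are suspended, so no
// thread keeps running with a stale allocation fast path.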
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

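// The counter below keeps the instrumented allocation entry points installed for as long as at
// least one client needs them; only the 0 -> 1 and 1 -> 0 transitions actually swap entry points.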
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

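// Installs new code for a method, but keeps the instrumentation entry stub or the interpreter
// bridge in place when the currently installed instrumentation still requires them.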
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

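// Forces the method to be executed by the interpreter: it is recorded in deoptimized_methods_
// and its entry point is redirected through the instrumentation stub.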
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

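// Reverses Deoptimize(): restores the method's code (or the resolution trampoline for
// uninitialized classes) and, once the last deoptimized method is removed, restores every
// thread's stack.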
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

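// Returns the quick code the method would execute if it were not instrumented, falling back to
// its OAT code when the current entry point is a trampoline or a bridge.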
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  // TODO: same comment as in DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_read_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldRead(thread, this_object, method, dex_pc, field);
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  // TODO: same comment as in DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_write_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
               << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

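// Called from the instrumentation entry stub: records the caller's return pc in a new
// instrumentation frame and reports the method entry event.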
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

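// Called from the instrumentation exit stub: pops the instrumentation frame, reports the method
// exit event, and either returns to the recorded return pc or requests deoptimization into the
// interpreter when the caller has to run interpreted.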
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  for (auto pair : deoptimized_methods_) {
    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
     << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art