blob: 577bbf99f150b7d23165d43f8f469c4004830876 [file] [log] [blame]
jeffhao725a9572012-11-13 18:20:12 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_INSTRUMENTATION_H_
18#define ART_RUNTIME_INSTRUMENTATION_H_
jeffhao725a9572012-11-13 18:20:12 -080019
Ian Rogers576ca0c2014-06-06 15:58:22 -070020#include <stdint.h>
Ian Rogers576ca0c2014-06-06 15:58:22 -070021#include <list>
Mathieu Chartiere401d142015-04-22 13:56:20 -070022#include <unordered_set>
Ian Rogers576ca0c2014-06-06 15:58:22 -070023
Ian Rogersd582fa42014-11-05 23:46:43 -080024#include "arch/instruction_set.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070025#include "base/enums.h"
Elliott Hughes76160052012-12-12 16:31:20 -080026#include "base/macros.h"
Ian Rogers719d1a32014-03-06 12:13:39 -080027#include "base/mutex.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070028#include "gc_root.h"
Sebastien Hertz0462c4c2015-04-01 16:34:17 +020029#include "safe_map.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080030
jeffhao725a9572012-11-13 18:20:12 -080031namespace art {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080032namespace mirror {
Brian Carlstromea46f952013-07-30 01:26:50 -070033 class Class;
34 class Object;
35 class Throwable;
Ian Rogers62d6c772013-02-27 08:32:07 -080036} // namespace mirror
Mathieu Chartierc7853442015-03-27 14:35:38 -070037class ArtField;
Mathieu Chartiere401d142015-04-22 13:56:20 -070038class ArtMethod;
Alex Lightd7661582017-05-01 13:48:16 -070039template <typename T> class Handle;
Ian Rogers62d6c772013-02-27 08:32:07 -080040union JValue;
jeffhao725a9572012-11-13 18:20:12 -080041class Thread;
Mingyao Yang047abb22017-08-23 15:26:57 -070042enum class DeoptimizationMethodType;
jeffhao725a9572012-11-13 18:20:12 -080043
Ian Rogers62d6c772013-02-27 08:32:07 -080044namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080045
// Selects which of the interpreter's two dispatch-handler tables is in effect, depending on
// whether suspend/instrumentation checks must run between bytecodes.
enum InterpreterHandlerTable {
  // Fast table: no suspend check, no instrumentation.
  kMainHandlerTable = 0,
  // Slow table: suspend check and/or instrumentation enabled.
  kAlternativeHandlerTable = 1,
  // Count of the tables above.
  kNumHandlerTables = 2
};
53
// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance, but guarantees listeners observe every entry/exit event. Used as the
// default for EnableMethodTracing's needs_interpreter parameter below.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
58
Ian Rogers62d6c772013-02-27 08:32:07 -080059// Instrumentation event listener API. Registered listeners will get the appropriate call back for
60// the events they are listening for. The call backs supply the thread, method and dex_pc the event
61// occurred upon. The thread may or may not be Thread::Current().
62struct InstrumentationListener {
63 InstrumentationListener() {}
64 virtual ~InstrumentationListener() {}
65
66 // Call-back for when a method is entered.
Alex Lightd7661582017-05-01 13:48:16 -070067 virtual void MethodEntered(Thread* thread,
68 Handle<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -070069 ArtMethod* method,
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070070 uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080071
Alex Lightd7661582017-05-01 13:48:16 -070072 virtual void MethodExited(Thread* thread,
73 Handle<mirror::Object> this_object,
74 ArtMethod* method,
75 uint32_t dex_pc,
76 Handle<mirror::Object> return_value)
77 REQUIRES_SHARED(Locks::mutator_lock_);
78
79 // Call-back for when a method is exited. The implementor should either handler-ize the return
80 // value (if appropriate) or use the alternate MethodExited callback instead if they need to
81 // go through a suspend point.
82 virtual void MethodExited(Thread* thread,
83 Handle<mirror::Object> this_object,
84 ArtMethod* method,
85 uint32_t dex_pc,
Ian Rogers62d6c772013-02-27 08:32:07 -080086 const JValue& return_value)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070087 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080088
89 // Call-back for when a method is popped due to an exception throw. A method will either cause a
90 // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
Alex Lightd7661582017-05-01 13:48:16 -070091 virtual void MethodUnwind(Thread* thread,
92 Handle<mirror::Object> this_object,
93 ArtMethod* method,
94 uint32_t dex_pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070095 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080096
97 // Call-back for when the dex pc moves in a method.
Alex Lightd7661582017-05-01 13:48:16 -070098 virtual void DexPcMoved(Thread* thread,
99 Handle<mirror::Object> this_object,
100 ArtMethod* method,
101 uint32_t new_dex_pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700102 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800103
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200104 // Call-back for when we read from a field.
Alex Lightd7661582017-05-01 13:48:16 -0700105 virtual void FieldRead(Thread* thread,
106 Handle<mirror::Object> this_object,
107 ArtMethod* method,
108 uint32_t dex_pc,
109 ArtField* field) = 0;
110
111 virtual void FieldWritten(Thread* thread,
112 Handle<mirror::Object> this_object,
113 ArtMethod* method,
114 uint32_t dex_pc,
115 ArtField* field,
116 Handle<mirror::Object> field_value)
117 REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200118
119 // Call-back for when we write into a field.
Alex Lightd7661582017-05-01 13:48:16 -0700120 virtual void FieldWritten(Thread* thread,
121 Handle<mirror::Object> this_object,
122 ArtMethod* method,
123 uint32_t dex_pc,
124 ArtField* field,
125 const JValue& field_value)
126 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200127
Ian Rogers62d6c772013-02-27 08:32:07 -0800128 // Call-back when an exception is caught.
Alex Lightd7661582017-05-01 13:48:16 -0700129 virtual void ExceptionCaught(Thread* thread,
130 Handle<mirror::Throwable> exception_object)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700131 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800132
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000133 // Call-back for when we execute a branch.
134 virtual void Branch(Thread* thread,
135 ArtMethod* method,
136 uint32_t dex_pc,
137 int32_t dex_pc_offset)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700138 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100139
140 // Call-back for when we get an invokevirtual or an invokeinterface.
141 virtual void InvokeVirtualOrInterface(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -0700142 Handle<mirror::Object> this_object,
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100143 ArtMethod* caller,
144 uint32_t dex_pc,
145 ArtMethod* callee)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700146 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
jeffhao725a9572012-11-13 18:20:12 -0800147};
148
Ian Rogers62d6c772013-02-27 08:32:07 -0800149// Instrumentation is a catch-all for when extra information is required from the runtime. The
150// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
151// to method entry and exit, it may also force execution to be switched to the interpreter and
152// trigger deoptimization.
jeffhao725a9572012-11-13 18:20:12 -0800153class Instrumentation {
154 public:
Ian Rogers62d6c772013-02-27 08:32:07 -0800155 enum InstrumentationEvent {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800156 kMethodEntered = 0x1,
157 kMethodExited = 0x2,
158 kMethodUnwind = 0x4,
159 kDexPcMoved = 0x8,
160 kFieldRead = 0x10,
161 kFieldWritten = 0x20,
162 kExceptionCaught = 0x40,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000163 kBranch = 0x80,
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100164 kInvokeVirtualOrInterface = 0x100,
Ian Rogers62d6c772013-02-27 08:32:07 -0800165 };
jeffhao725a9572012-11-13 18:20:12 -0800166
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200167 enum class InstrumentationLevel {
168 kInstrumentNothing, // execute without instrumentation
169 kInstrumentWithInstrumentationStubs, // execute with instrumentation entry/exit stubs
170 kInstrumentWithInterpreter // execute with interpreter
171 };
172
  Instrumentation();

  // Add a listener to be notified of the masked-together set of instrumentation events. This
  // may suspend the runtime to install stubs. You are expected to hold the mutator lock as a
  // proxy for saying you should have suspended all threads (installing stubs while threads are
  // running will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Deoptimization support.
  void EnableDeoptimization()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);
  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_);

  // True when interpreter stubs have been installed for all methods (full deoptimization).
  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether deoptimization has been enabled (see EnableDeoptimization).
  // NOTE(review): only reads deoptimization_enabled_, so this could be const-qualified.
  bool CanDeoptimize() {
    return deoptimization_enabled_;
  }

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
  // method (except a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Undeoptimize the method by restoring its entrypoints. Nevertheless, a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!deoptimized_methods_lock_) REQUIRES_SHARED(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Returns the interpreter dispatch table currently selected (main vs alternative); see
  // UpdateInterpreterHandlerTable.
  InterpreterHandlerTable GetInterpreterHandlerTable() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  // Toggle instrumentation of the quick allocation entrypoints. The *Locked variants require
  // Locks::instrument_entrypoints_lock_ to already be held by the caller.
  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);
Ian Rogersfa824272013-11-05 16:12:57 -0800264
  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Update the code of a method respecting any installed stubs from debugger.
  void UpdateMethodsCodeForJavaDebuggable(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Switch the runtime into interpret-only mode, remembering that it was explicitly forced
  // (see IsForcedInterpretOnly).
  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  // True only when interpret-only mode was explicitly forced via ForceInterpretOnly.
  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  // Code is in boot image oat file which isn't compiled as debuggable.
  // Need debug version (interpreter or jitted) if that's the case.
  bool NeedDebugVersionFor(ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // True once instrumentation exit stubs have been installed.
  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  // Per-event queries: whether any listener is registered for the given event kind. All are
  // read under the shared mutator lock.
  bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionCaughtListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }

  bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasInvokeVirtualOrInterfaceListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_invoke_virtual_or_interface_listeners_;
  }

  // True when a listener of any event kind is registered.
  bool IsActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_invoke_virtual_or_interface_listeners_;
  }

  // Any instrumentation *other* than what is needed for Jit profiling active?
  // (Same as IsActive minus the method-entry and invoke-virtual-or-interface listener flags.)
  bool NonJitProfilingActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_;
  }
352
  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  // Only forwards to the (out-of-line) Impl when a listener is actually registered.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread,
                       mirror::Object* this_object,
                       ArtMethod* method,
                       uint32_t dex_pc,
                       const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  // Inform listeners that an invokevirtual or invokeinterface was executed.
  void InvokeVirtualOrInterface(Thread* thread,
                                mirror::Object* this_object,
                                ArtMethod* caller,
                                uint32_t dex_pc,
                                ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
      InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
    }
  }
427
  // Inform listeners that an exception was caught.
  void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the kind of deoptimization to use for the given method (see
  // DeoptimizationMethodType, declared elsewhere).
  DeoptimizationMethodType GetDeoptimizationMethodType(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame
  // returning the intended link register. Generates method exit events. The gpr_result and
  // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
  // result values of the function are stored. Both pointers must always be valid but the values
  // held there will only be meaningful if interpreted as the appropriate type given the function
  // being returned from.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t* gpr_result, uint64_t* fpr_result)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Pops an instrumentation frame from the current thread and generates an unwind event.
  // Returns the return pc for the instrumentation frame that's popped.
  uintptr_t PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call back for configure stubs.
  void InstallStubsForClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);

  // Installs the appropriate entrypoint stubs for a single method.
  void InstallStubsForMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Install instrumentation exit stub on every method of the stack of the given thread.
  // This is used by the debugger to cause a deoptimization of the thread's stack after updating
  // local variable(s).
  void InstrumentThreadStack(Thread* thread)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Computes a frame id from the depth of the stack frame, accounting for inlined frames.
  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Does not hold lock, used to check if someone changed from not instrumented to instrumented
  // during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

  InstrumentationLevel GetCurrentInstrumentationLevel() const;

 private:
  // Returns true if moving to the given instrumentation level requires the installation of stubs.
  // False otherwise.
  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. Therefore the current instrumentation level
  // becomes the highest instrumentation level required by a client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_,
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800498
Mathieu Chartier90443472015-07-16 20:32:27 -0700499 void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
buzbee1452bee2015-03-06 14:43:04 -0800500 /*
501 * TUNING: Dalvik's mterp stashes the actual current handler table base in a
502 * tls field. For Arm, this enables all suspend, debug & tracing checks to be
503 * collapsed into a single conditionally-executed ldw instruction.
504 * Move to Dalvik-style handler-table management for both the goto interpreter and
505 * mterp.
506 */
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200507 interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
508 }
509
  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to mutator lock which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  // Out-of-line event-dispatch implementations. Presumably invoked by the
  // corresponding public event methods once a listener is known to be
  // installed — confirm against the inline definitions earlier in this header.
  void MethodEnterEventImpl(Thread* thread,
                            ObjPtr<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void InvokeVirtualOrInterfaceImpl(Thread* thread,
                                    ObjPtr<mirror::Object> this_object,
                                    ArtMethod* caller,
                                    uint32_t dex_pc,
                                    ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread,
                          ObjPtr<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           ArtField* field,
                           const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  // Mutating helpers require deoptimized_methods_lock_ held exclusively;
  // read-only helpers require it at least shared.
  bool AddDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  bool IsDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  ArtMethod* BeginDeoptimizedMethod()
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  // Shared implementation for updating a method's entrypoint code pointer.
  void UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700566
  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter? ie -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex PC move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not for example std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended; this is done
  // to prevent races with the GC, where the GC relies on thread suspension to only see
  // alloc_entrypoints_instrumented_ change at suspend points.
  bool alloc_entrypoints_instrumented_;

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
662};
// Stream output operators for logging/debug printing of the instrumentation enums.
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);
jeffhao725a9572012-11-13 18:20:12 -0800665
// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object,
                            ArtMethod* method,
                            uintptr_t return_pc,
                            size_t frame_id,
                            bool interpreter_entry)
      : this_object_(this_object),
        method_(method),
        return_pc_(return_pc),
        frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  // Human-readable description of this frame, for debugging/tracing output.
  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Receiver of the instrumented call; presumably null for static methods — confirm at call sites.
  mirror::Object* this_object_;
  // The method this frame was pushed for.
  ArtMethod* method_;
  // Return address saved when the frame was instrumented.
  uintptr_t return_pc_;
  // Identifier of the corresponding managed stack frame.
  size_t frame_id_;
  // Whether this frame was pushed for an entry into the interpreter.
  bool interpreter_entry_;
};
688
689} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -0800690} // namespace art
691
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700692#endif // ART_RUNTIME_INSTRUMENTATION_H_