/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_JIT_PROFILING_INFO_H_
#define ART_RUNTIME_JIT_PROFILING_INFO_H_

#include <cstring>
#include <limits>
#include <vector>

#include "base/macros.h"
#include "gc_root.h"

namespace art {

class ArtMethod;
class ProfilingInfo;

namespace jit {
class JitCodeCache;
}

namespace mirror {
class Class;
}

// Structure to store the classes seen at runtime for a specific instruction.
// Once the classes_ array is full, we consider the INVOKE to be megamorphic.
class InlineCache {
 public:
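  // True when exactly one class has been recorded: the first slot is filled
  // and the second one is still empty.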
  bool IsMonomorphic() const {
    DCHECK_GE(kIndividualCacheSize, 2);
    return !classes_[0].IsNull() && classes_[1].IsNull();
  }

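  // True once every slot of the cache has been filled; the INVOKE is then
  // considered megamorphic.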
  bool IsMegamorphic() const {
    for (size_t i = 0; i < kIndividualCacheSize; ++i) {
      if (classes_[i].IsNull()) {
        return false;
      }
    }
    return true;
  }

  mirror::Class* GetMonomorphicType() const SHARED_REQUIRES(Locks::mutator_lock_) {
    // Note that we cannot ensure the inline cache is actually monomorphic
    // at this point, as other threads may have updated it.
    DCHECK(!classes_[0].IsNull());
    return classes_[0].Read();
  }

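  // True when no class has been recorded yet.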
  bool IsUninitialized() const {
    return classes_[0].IsNull();
  }

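  // True when at least two classes have been recorded but the last slot is
  // still empty, i.e. the cache is neither monomorphic nor megamorphic.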
  bool IsPolymorphic() const {
    DCHECK_GE(kIndividualCacheSize, 3);
    return !classes_[1].IsNull() && classes_[kIndividualCacheSize - 1].IsNull();
  }

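  // Returns the class recorded in slot `i`, or null if that slot is empty.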
  mirror::Class* GetTypeAt(size_t i) const SHARED_REQUIRES(Locks::mutator_lock_) {
    return classes_[i].Read();
  }

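  // Maximum number of classes recorded per call site before the cache is
  // considered megamorphic.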
  static constexpr uint16_t kIndividualCacheSize = 5;

 private:
  uint32_t dex_pc_;
  GcRoot<mirror::Class> classes_[kIndividualCacheSize];

  friend class ProfilingInfo;

  DISALLOW_COPY_AND_ASSIGN(InlineCache);
};

/**
 * Profiling info for a method, created and filled by the interpreter once the
 * method is warm, and used by the compiler to drive optimizations.
 */
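//
// A rough usage sketch (the call sequence is illustrative; only the methods
// below are declared in this header): once a method is warm, the interpreter
// creates its info with ProfilingInfo::Create(self, method, retry_allocation),
// records the receiver class of each virtual/interface call through
// AddInvokeInfo(dex_pc, cls), and the JIT compiler later reads the resulting
// inline caches via GetInlineCache(dex_pc).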
class ProfilingInfo {
 public:
  // Create a ProfilingInfo for 'method'. Returns whether it succeeded, or
  // whether it is not needed because the method has no virtual/interface
  // invocations.
  static bool Create(Thread* self, ArtMethod* method, bool retry_allocation)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Add information from an executed INVOKE instruction to the profile.
  void AddInvokeInfo(uint32_t dex_pc, mirror::Class* cls)
      // Method should not be interruptible, as it manipulates the ProfilingInfo
      // which can be concurrently collected.
      REQUIRES(Roles::uninterruptible_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor) NO_THREAD_SAFETY_ANALYSIS {
    for (size_t i = 0; i < number_of_inline_caches_; ++i) {
      InlineCache* cache = &cache_[i];
      for (size_t j = 0; j < InlineCache::kIndividualCacheSize; ++j) {
        visitor.VisitRootIfNonNull(cache->classes_[j].AddressWithoutBarrier());
      }
    }
  }

  ArtMethod* GetMethod() const {
    return method_;
  }

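  // Returns the inline cache recorded for the INVOKE instruction at `dex_pc`.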
  InlineCache* GetInlineCache(uint32_t dex_pc);

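  // Whether a JIT compilation of this method is in progress; `osr` selects the
  // on-stack-replacement compilation rather than the regular one.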
  bool IsMethodBeingCompiled(bool osr) const {
    return osr
        ? is_osr_method_being_compiled_
        : is_method_being_compiled_;
  }

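  // Marks or clears the corresponding compilation (regular or OSR) as being in
  // progress.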
  void SetIsMethodBeingCompiled(bool value, bool osr) {
    if (osr) {
      is_osr_method_being_compiled_ = value;
    } else {
      is_method_being_compiled_ = value;
    }
  }

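  // Stash and retrieve the method's entry point while the JIT code cache is
  // probing the liveness of its compiled code (see saved_entry_point_ below).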
  void SetSavedEntryPoint(const void* entry_point) {
    saved_entry_point_ = entry_point;
  }

  const void* GetSavedEntryPoint() const {
    return saved_entry_point_;
  }

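  // Clears every class recorded in the inline caches, dropping the GC roots
  // they hold.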
  void ClearGcRootsInInlineCaches() {
    for (size_t i = 0; i < number_of_inline_caches_; ++i) {
      InlineCache* cache = &cache_[i];
      memset(&cache->classes_[0],
             0,
             InlineCache::kIndividualCacheSize * sizeof(GcRoot<mirror::Class>));
    }
  }

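  // Track how many ongoing compilations are currently inlining this method,
  // so that the GC does not clear its inline caches while they are in use
  // (see current_inline_uses_ below).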
  void IncrementInlineUse() {
    DCHECK_NE(current_inline_uses_, std::numeric_limits<uint16_t>::max());
    current_inline_uses_++;
  }

  void DecrementInlineUse() {
    DCHECK_GT(current_inline_uses_, 0);
    current_inline_uses_--;
  }

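  // True while a compilation of this method (regular or OSR) is in progress,
  // or while another compilation is inlining it.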
  bool IsInUseByCompiler() const {
    return IsMethodBeingCompiled(/*osr*/ true) || IsMethodBeingCompiled(/*osr*/ false) ||
        (current_inline_uses_ > 0);
  }

 private:
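  // The inline caches are laid out inline after this object (see cache_
  // below): zero them out, then record the dex pc of each profiled
  // instruction.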
  ProfilingInfo(ArtMethod* method, const std::vector<uint32_t>& entries)
      : number_of_inline_caches_(entries.size()),
        method_(method),
        is_method_being_compiled_(false),
        is_osr_method_being_compiled_(false),
        current_inline_uses_(0),
        saved_entry_point_(nullptr) {
    memset(&cache_, 0, number_of_inline_caches_ * sizeof(InlineCache));
    for (size_t i = 0; i < number_of_inline_caches_; ++i) {
      cache_[i].dex_pc_ = entries[i];
    }
  }

  // Number of instructions we are profiling in the ArtMethod.
  const uint32_t number_of_inline_caches_;

  // Method this profiling info is for.
  ArtMethod* const method_;

  // Whether the ArtMethod is currently being compiled. This flag
  // is implicitly guarded by the JIT code cache lock.
  // TODO: Make the JIT code cache lock global.
  bool is_method_being_compiled_;
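  // Same, but for an on-stack-replacement (OSR) compilation.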
  bool is_osr_method_being_compiled_;

  // When the compiler inlines the method associated to this ProfilingInfo,
  // it updates this counter so that the GC does not try to clear the inline caches.
  uint16_t current_inline_uses_;

  // Entry point of the corresponding ArtMethod, saved while the JIT code cache
  // is probing the liveness of compiled code.
  const void* saved_entry_point_;

  // Dynamically allocated array of size `number_of_inline_caches_`.
  InlineCache cache_[0];

  friend class jit::JitCodeCache;

  DISALLOW_COPY_AND_ASSIGN(ProfilingInfo);
};

}  // namespace art

#endif  // ART_RUNTIME_JIT_PROFILING_INFO_H_