blob: 91371d1e4ec03b2c3e3dc681caf6fe792d198e7f [file] [log] [blame]
Andreas Gampe36a296f2017-06-13 14:11:11 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_
18#define ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_
19
#include <cstdint>
#include <cstring>
#include <memory>
#include <string>

#include "base/macros.h"
#include "base/mutex.h"
#include "dex/dex_file.h"
#include "lock_count_data.h"
#include "read_barrier.h"
#include "stack_reference.h"
#include "verify_object.h"
31
32namespace art {
33
34namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080035class Object;
Andreas Gampe36a296f2017-06-13 14:11:11 -070036} // namespace mirror
37
38class ArtMethod;
39class ShadowFrame;
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +010040template<class MirrorType> class ObjPtr;
Andreas Gampe36a296f2017-06-13 14:11:11 -070041class Thread;
42union JValue;
43
44// Forward declaration. Just calls the destructor.
45struct ShadowFrameDeleter;
46using ShadowFrameAllocaUniquePtr = std::unique_ptr<ShadowFrame, ShadowFrameDeleter>;
47
// ShadowFrame has 2 possible layouts:
//  - interpreter - separate VRegs and reference arrays. References are in the reference array.
//  - JNI - just VRegs, but where every VReg holds a reference.
//
// The frame is allocated as one contiguous chunk (see ComputeSize()): the fixed
// header below, followed by the flexible array member vregs_[] which holds the
// raw vreg slots and, after them, the parallel reference array.
class ShadowFrame {
 public:
  // Compute size of ShadowFrame in bytes assuming it has a reference array.
  // Layout: header + num_vregs raw 4-byte slots + num_vregs reference slots.
  static size_t ComputeSize(uint32_t num_vregs) {
    return sizeof(ShadowFrame) + (sizeof(uint32_t) * num_vregs) +
           (sizeof(StackReference<mirror::Object>) * num_vregs);
  }

  // Create ShadowFrame in heap for deoptimization.
  // Must be paired with DeleteDeoptimizedFrame() (plain delete would not free
  // the trailing variable-size arrays correctly).
  static ShadowFrame* CreateDeoptimizedFrame(uint32_t num_vregs, ShadowFrame* link,
                                             ArtMethod* method, uint32_t dex_pc) {
    uint8_t* memory = new uint8_t[ComputeSize(num_vregs)];
    return CreateShadowFrameImpl(num_vregs, link, method, dex_pc, memory);
  }

  // Delete a ShadowFrame allocated on the heap for deoptimization.
  static void DeleteDeoptimizedFrame(ShadowFrame* sf) {
    sf->~ShadowFrame();  // Explicitly destruct.
    uint8_t* memory = reinterpret_cast<uint8_t*>(sf);
    delete[] memory;  // Frees the raw buffer allocated in CreateDeoptimizedFrame().
  }

  // Create a shadow frame in a fresh alloca. This needs to be in the context of the caller.
  // Inlining doesn't work, the compiler will still undo the alloca. So this needs to be a macro.
  // (Uses a GCC/Clang statement expression; the resulting unique_ptr runs only the
  // destructor, never free(), since the storage lives on the caller's stack.)
#define CREATE_SHADOW_FRAME(num_vregs, link, method, dex_pc) ({                              \
    size_t frame_size = ShadowFrame::ComputeSize(num_vregs);                                 \
    void* alloca_mem = alloca(frame_size);                                                   \
    ShadowFrameAllocaUniquePtr(                                                              \
        ShadowFrame::CreateShadowFrameImpl((num_vregs), (link), (method), (dex_pc),          \
                                           (alloca_mem)));                                   \
    })

  ~ShadowFrame() {}

  // TODO(iam): Clean references array up since they're always there,
  // we don't need to do conditionals.
  bool HasReferenceArray() const {
    return true;
  }

  uint32_t NumberOfVRegs() const {
    return number_of_vregs_;
  }

  // Current dex pc, in code units relative to the start of the code item.
  // Derived from dex_pc_ptr_ when set, otherwise falls back to the cached dex_pc_.
  uint32_t GetDexPC() const {
    return (dex_pc_ptr_ == nullptr) ? dex_pc_ : dex_pc_ptr_ - dex_instructions_;
  }

  // Hotness countdown values cached in the frame. Their exact semantics are
  // defined by the interpreter/JIT code that reads them, not by this header.
  int16_t GetCachedHotnessCountdown() const {
    return cached_hotness_countdown_;
  }

  void SetCachedHotnessCountdown(int16_t cached_hotness_countdown) {
    cached_hotness_countdown_ = cached_hotness_countdown;
  }

  int16_t GetHotnessCountdown() const {
    return hotness_countdown_;
  }

  void SetHotnessCountdown(int16_t hotness_countdown) {
    hotness_countdown_ = hotness_countdown;
  }

  // Set the dex pc directly; clears dex_pc_ptr_ so GetDexPC() uses the cached value.
  void SetDexPC(uint32_t dex_pc) {
    dex_pc_ = dex_pc;
    dex_pc_ptr_ = nullptr;
  }

  // Link to the caller's shadow frame, or null for the bottom frame.
  ShadowFrame* GetLink() const {
    return link_;
  }

  void SetLink(ShadowFrame* frame) {
    DCHECK_NE(this, frame);  // A frame must never link to itself.
    link_ = frame;
  }

  int32_t GetVReg(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const int32_t*>(vreg);
  }

  // Shorts are extended to Ints in VRegs. Interpreter intrinsics needs them as shorts.
  int16_t GetVRegShort(size_t i) const {
    return static_cast<int16_t>(GetVReg(i));
  }

  uint32_t* GetVRegAddr(size_t i) {
    return &vregs_[i];
  }

  // Address of the reference slot mirroring vreg i (the second half of vregs_).
  uint32_t* GetShadowRefAddr(size_t i) {
    DCHECK(HasReferenceArray());
    DCHECK_LT(i, NumberOfVRegs());
    return &vregs_[i + NumberOfVRegs()];
  }

  // Base of the dex instruction stream for the current code item.
  const uint16_t* GetDexInstructions() const {
    return dex_instructions_;
  }

  float GetVRegFloat(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    // NOTE: Strict-aliasing?
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const float*>(vreg);
  }

  // Wide values occupy vregs i and i+1; only 4-byte alignment is guaranteed,
  // hence the aligned(4) typedefs below.
  int64_t GetVRegLong(size_t i) const {
    DCHECK_LT(i + 1, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    typedef const int64_t unaligned_int64 __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_int64*>(vreg);
  }

  double GetVRegDouble(size_t i) const {
    DCHECK_LT(i + 1, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    typedef const double unaligned_double __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_double*>(vreg);
  }

  // Look up the reference given its virtual register number.
  // If this returns non-null then this does not mean the vreg is currently a reference
  // on non-moving collectors. Check that the raw reg with GetVReg is equal to this if not certain.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  mirror::Object* GetVRegReference(size_t i) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, NumberOfVRegs());
    mirror::Object* ref;
    if (HasReferenceArray()) {
      ref = References()[i].AsMirrorPtr();
    } else {
      // JNI layout: the raw vreg slot itself holds the compressed reference.
      const uint32_t* vreg_ptr = &vregs_[i];
      ref = reinterpret_cast<const StackReference<mirror::Object>*>(vreg_ptr)->AsMirrorPtr();
    }
    ReadBarrier::MaybeAssertToSpaceInvariant(ref);
    if (kVerifyFlags & kVerifyReads) {
      VerifyObject(ref);
    }
    return ref;
  }

  // Get view of vregs as range of consecutive arguments starting at i.
  uint32_t* GetVRegArgs(size_t i) {
    return &vregs_[i];
  }

  void SetVReg(size_t i, int32_t val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<int32_t*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  void SetVRegFloat(size_t i, float val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<float*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  void SetVRegLong(size_t i, int64_t val) {
    DCHECK_LT(i + 1, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    typedef int64_t unaligned_int64 __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_int64*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();  // Wide value: clear both reference slots.
    }
  }

  void SetVRegDouble(size_t i, double val) {
    DCHECK_LT(i + 1, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    typedef double unaligned_double __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_double*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();  // Wide value: clear both reference slots.
    }
  }

  // Store a reference into vreg i (updates both the raw slot and, where present,
  // the reference array). Defined out of line.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetVRegReference(size_t i, ObjPtr<mirror::Object> val)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_) {
    DCHECK(method != nullptr);
    DCHECK(method_ != nullptr);  // Only replaces an already-set method.
    method_ = method;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(method_ != nullptr);
    return method_;
  }

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject(uint16_t num_ins) const REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether the given stack-reference slot lies inside this frame's storage.
  bool Contains(StackReference<mirror::Object>* shadow_frame_entry_obj) const {
    if (HasReferenceArray()) {
      return ((&References()[0] <= shadow_frame_entry_obj) &&
              (shadow_frame_entry_obj <= (&References()[NumberOfVRegs() - 1])));
    } else {
      uint32_t* shadow_frame_entry = reinterpret_cast<uint32_t*>(shadow_frame_entry_obj);
      return ((&vregs_[0] <= shadow_frame_entry) &&
              (shadow_frame_entry <= (&vregs_[NumberOfVRegs() - 1])));
    }
  }

  LockCountData& GetLockCountData() {
    return lock_count_data_;
  }

  // Field offsets below are consumed by code that addresses the frame directly
  // (e.g. generated/assembly code), hence constexpr.
  static constexpr size_t LockCountDataOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, lock_count_data_);
  }

  static constexpr size_t LinkOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, link_);
  }

  static constexpr size_t MethodOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, method_);
  }

  static constexpr size_t DexPCOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_);
  }

  static constexpr size_t NumberOfVRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, number_of_vregs_);
  }

  static constexpr size_t VRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, vregs_);
  }

  static constexpr size_t ResultRegisterOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, result_register_);
  }

  static constexpr size_t DexPCPtrOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_ptr_);
  }

  static constexpr size_t DexInstructionsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_instructions_);
  }

  static constexpr size_t CachedHotnessCountdownOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, cached_hotness_countdown_);
  }

  static constexpr size_t HotnessCountdownOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, hotness_countdown_);
  }

  // Create ShadowFrame for interpreter using provided memory.
  static ShadowFrame* CreateShadowFrameImpl(uint32_t num_vregs,
                                            ShadowFrame* link,
                                            ArtMethod* method,
                                            uint32_t dex_pc,
                                            void* memory) {
    return new (memory) ShadowFrame(num_vregs, link, method, dex_pc, true);
  }

  const uint16_t* GetDexPCPtr() {
    return dex_pc_ptr_;
  }

  void SetDexPCPtr(uint16_t* dex_pc_ptr) {
    dex_pc_ptr_ = dex_pc_ptr;
  }

  JValue* GetResultRegister() {
    return result_register_;
  }

  bool NeedsNotifyPop() const {
    return needs_notify_pop_;
  }

  void SetNotifyPop(bool notify) {
    needs_notify_pop_ = notify;
  }

 private:
  // Private: frames are created via the factory methods above (placement new
  // into a buffer large enough for the trailing arrays).
  ShadowFrame(uint32_t num_vregs, ShadowFrame* link, ArtMethod* method,
              uint32_t dex_pc, bool has_reference_array)
      : link_(link),
        method_(method),
        result_register_(nullptr),
        dex_pc_ptr_(nullptr),
        dex_instructions_(nullptr),
        number_of_vregs_(num_vregs),
        dex_pc_(dex_pc),
        cached_hotness_countdown_(0),
        hotness_countdown_(0),
        needs_notify_pop_(0) {
    // TODO(iam): Remove this parameter, it's an artifact of portable removal
    DCHECK(has_reference_array);
    if (has_reference_array) {
      // Zero both the raw vregs and the reference array in one pass.
      memset(vregs_, 0, num_vregs * (sizeof(uint32_t) + sizeof(StackReference<mirror::Object>)));
    } else {
      memset(vregs_, 0, num_vregs * sizeof(uint32_t));
    }
  }

  // The reference array lives immediately after the raw vreg slots.
  const StackReference<mirror::Object>* References() const {
    DCHECK(HasReferenceArray());
    const uint32_t* vreg_end = &vregs_[NumberOfVRegs()];
    return reinterpret_cast<const StackReference<mirror::Object>*>(vreg_end);
  }

  StackReference<mirror::Object>* References() {
    return const_cast<StackReference<mirror::Object>*>(
        const_cast<const ShadowFrame*>(this)->References());
  }

  // Link to previous shadow frame or null.
  ShadowFrame* link_;
  ArtMethod* method_;
  JValue* result_register_;
  // Pointer into dex_instructions_ at the current pc, or null if dex_pc_ is used instead.
  const uint16_t* dex_pc_ptr_;
  // Dex instruction base of the code item.
  const uint16_t* dex_instructions_;
  LockCountData lock_count_data_;  // This may contain GC roots when lock counting is active.
  const uint32_t number_of_vregs_;
  uint32_t dex_pc_;
  int16_t cached_hotness_countdown_;
  int16_t hotness_countdown_;
  // TODO Might be worth it to try to bit-pack this into some other field to reduce stack usage.
  // NB alignment requires that this field takes 4 bytes. Only 1 bit is actually ever used.
  bool needs_notify_pop_;

  // This is a two-part array:
  //  - [0..number_of_vregs) holds the raw virtual registers, and each element here is always 4
  //    bytes.
  //  - [number_of_vregs..number_of_vregs*2) holds only reference registers. Each element here is
  //    ptr-sized.
  // In other words when a primitive is stored in vX, the second (reference) part of the array will
  // be null. When a reference is stored in vX, the second (reference) part of the array will be a
  // copy of vX.
  uint32_t vregs_[0];

  DISALLOW_IMPLICIT_CONSTRUCTORS(ShadowFrame);
};
416
417struct ShadowFrameDeleter {
418 inline void operator()(ShadowFrame* frame) {
419 if (frame != nullptr) {
420 frame->~ShadowFrame();
421 }
422 }
423};
424
425} // namespace art
426
427#endif // ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_