blob: 88eb413ec7563ca9be8b94769a52aa466004abf6 [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_
18#define ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_
19
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070020#include <cstdint>
Andreas Gampe36a296f2017-06-13 14:11:11 -070021#include <cstring>
Andreas Gampe36a296f2017-06-13 14:11:11 -070022#include <string>
23
24#include "base/macros.h"
25#include "base/mutex.h"
David Sehr9e734c72018-01-04 17:56:19 -080026#include "dex/dex_file.h"
Andreas Gampe36a296f2017-06-13 14:11:11 -070027#include "lock_count_data.h"
28#include "read_barrier.h"
29#include "stack_reference.h"
30#include "verify_object.h"
31
32namespace art {
33
34namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080035class Object;
Andreas Gampe36a296f2017-06-13 14:11:11 -070036} // namespace mirror
37
38class ArtMethod;
39class ShadowFrame;
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +010040template<class MirrorType> class ObjPtr;
Andreas Gampe36a296f2017-06-13 14:11:11 -070041class Thread;
42union JValue;
43
44// Forward declaration. Just calls the destructor.
45struct ShadowFrameDeleter;
46using ShadowFrameAllocaUniquePtr = std::unique_ptr<ShadowFrame, ShadowFrameDeleter>;
47
48// ShadowFrame has 2 possible layouts:
49// - interpreter - separate VRegs and reference arrays. References are in the reference array.
50// - JNI - just VRegs, but where every VReg holds a reference.
class ShadowFrame {
 public:
  // Compute size of ShadowFrame in bytes assuming it has a reference array.
  // Layout is: fixed header (sizeof(ShadowFrame)), then num_vregs 4-byte
  // vreg slots, then num_vregs reference slots mirroring the vregs.
  static size_t ComputeSize(uint32_t num_vregs) {
    return sizeof(ShadowFrame) + (sizeof(uint32_t) * num_vregs) +
           (sizeof(StackReference<mirror::Object>) * num_vregs);
  }

  // Create ShadowFrame in heap for deoptimization. Must be freed with
  // DeleteDeoptimizedFrame (the frame is placement-constructed into a raw
  // uint8_t[] buffer, so plain `delete` would be wrong).
  static ShadowFrame* CreateDeoptimizedFrame(uint32_t num_vregs, ShadowFrame* link,
                                             ArtMethod* method, uint32_t dex_pc) {
    uint8_t* memory = new uint8_t[ComputeSize(num_vregs)];
    return CreateShadowFrameImpl(num_vregs, link, method, dex_pc, memory);
  }

  // Delete a ShadowFrame allocated on the heap for deoptimization.
  // Mirrors CreateDeoptimizedFrame: destroy in place, then free the
  // underlying byte buffer.
  static void DeleteDeoptimizedFrame(ShadowFrame* sf) {
    sf->~ShadowFrame();  // Explicitly destruct.
    uint8_t* memory = reinterpret_cast<uint8_t*>(sf);
    delete[] memory;
  }

  // Create a shadow frame in a fresh alloca. This needs to be in the context of the caller.
  // Inlining doesn't work, the compiler will still undo the alloca. So this needs to be a macro.
  // Returns a ShadowFrameAllocaUniquePtr; the deleter only runs the destructor
  // since the alloca storage is reclaimed automatically on caller return.
#define CREATE_SHADOW_FRAME(num_vregs, link, method, dex_pc) ({                              \
    size_t frame_size = ShadowFrame::ComputeSize(num_vregs);                                 \
    void* alloca_mem = alloca(frame_size);                                                   \
    ShadowFrameAllocaUniquePtr(                                                              \
        ShadowFrame::CreateShadowFrameImpl((num_vregs), (link), (method), (dex_pc),          \
                                           (alloca_mem)));                                   \
    })

  ~ShadowFrame() {}

  // TODO(iam): Clean references array up since they're always there,
  // we don't need to do conditionals.
  bool HasReferenceArray() const {
    return true;
  }

  uint32_t NumberOfVRegs() const {
    return number_of_vregs_;
  }

  // Current dex pc. If dex_pc_ptr_ is set, it is derived as the offset of
  // the instruction pointer from the code item's instruction base;
  // otherwise the explicitly stored dex_pc_ is returned.
  uint32_t GetDexPC() const {
    return (dex_pc_ptr_ == nullptr) ? dex_pc_ : dex_pc_ptr_ - dex_instructions_;
  }

  int16_t GetCachedHotnessCountdown() const {
    return cached_hotness_countdown_;
  }

  void SetCachedHotnessCountdown(int16_t cached_hotness_countdown) {
    cached_hotness_countdown_ = cached_hotness_countdown;
  }

  int16_t GetHotnessCountdown() const {
    return hotness_countdown_;
  }

  void SetHotnessCountdown(int16_t hotness_countdown) {
    hotness_countdown_ = hotness_countdown;
  }

  // Set an explicit dex pc; clears dex_pc_ptr_ so GetDexPC() returns this
  // value rather than deriving one from the instruction pointer.
  void SetDexPC(uint32_t dex_pc) {
    dex_pc_ = dex_pc;
    dex_pc_ptr_ = nullptr;
  }

  // Link to the previous (caller's) shadow frame, or null.
  ShadowFrame* GetLink() const {
    return link_;
  }

  void SetLink(ShadowFrame* frame) {
    DCHECK_NE(this, frame);
    link_ = frame;
  }

  int32_t GetVReg(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const int32_t*>(vreg);
  }

  // Shorts are extended to Ints in VRegs. Interpreter intrinsics needs them as shorts.
  int16_t GetVRegShort(size_t i) const {
    return static_cast<int16_t>(GetVReg(i));
  }

  uint32_t* GetVRegAddr(size_t i) {
    return &vregs_[i];
  }

  // Address of the reference-array slot shadowing vreg i (the second half
  // of the two-part vregs_ array).
  uint32_t* GetShadowRefAddr(size_t i) {
    DCHECK(HasReferenceArray());
    DCHECK_LT(i, NumberOfVRegs());
    return &vregs_[i + NumberOfVRegs()];
  }

  // Dex instruction base of the current code item (code units).
  const uint16_t* GetDexInstructions() const {
    return dex_instructions_;
  }

  float GetVRegFloat(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    // NOTE: Strict-aliasing?
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const float*>(vreg);
  }

  // Wide (64-bit) value occupying vregs i and i+1. The aligned(4) typedef
  // tells the compiler the load may be only 4-byte aligned.
  int64_t GetVRegLong(size_t i) const {
    DCHECK_LT(i + 1, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    typedef const int64_t unaligned_int64 __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_int64*>(vreg);
  }

  double GetVRegDouble(size_t i) const {
    DCHECK_LT(i + 1, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    typedef const double unaligned_double __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_double*>(vreg);
  }

  // Look up the reference given its virtual register number.
  // If this returns non-null then this does not mean the vreg is currently a reference
  // on non-moving collectors. Check that the raw reg with GetVReg is equal to this if not certain.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  mirror::Object* GetVRegReference(size_t i) const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, NumberOfVRegs());
    mirror::Object* ref;
    DCHECK(HasReferenceArray());
    ref = References()[i].AsMirrorPtr();
    ReadBarrier::MaybeAssertToSpaceInvariant(ref);
    if (kVerifyFlags & kVerifyReads) {
      VerifyObject(ref);
    }
    return ref;
  }

  // Get view of vregs as range of consecutive arguments starting at i.
  uint32_t* GetVRegArgs(size_t i) {
    return &vregs_[i];
  }

  void SetVReg(size_t i, int32_t val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<int32_t*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  void SetVRegFloat(size_t i, float val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<float*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  void SetVRegLong(size_t i, int64_t val) {
    DCHECK_LT(i + 1, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    typedef int64_t unaligned_int64 __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_int64*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();  // Wide value spans two reference slots.
    }
  }

  void SetVRegDouble(size_t i, double val) {
    DCHECK_LT(i + 1, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    typedef double unaligned_double __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_double*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();  // Wide value spans two reference slots.
    }
  }

  // Store a reference into vreg i. Defined out of line (updates both the
  // raw vreg and the reference-array slot).
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetVRegReference(size_t i, ObjPtr<mirror::Object> val)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_) {
    DCHECK(method != nullptr);
    DCHECK(method_ != nullptr);  // Only for replacing an already-set method.
    method_ = method;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(method_ != nullptr);
    return method_;
  }

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject(uint16_t num_ins) const REQUIRES_SHARED(Locks::mutator_lock_);

  // Whether the given stack reference points into this frame's storage.
  bool Contains(StackReference<mirror::Object>* shadow_frame_entry_obj) const {
    if (HasReferenceArray()) {
      return ((&References()[0] <= shadow_frame_entry_obj) &&
              (shadow_frame_entry_obj <= (&References()[NumberOfVRegs() - 1])));
    } else {
      uint32_t* shadow_frame_entry = reinterpret_cast<uint32_t*>(shadow_frame_entry_obj);
      return ((&vregs_[0] <= shadow_frame_entry) &&
              (shadow_frame_entry <= (&vregs_[NumberOfVRegs() - 1])));
    }
  }

  LockCountData& GetLockCountData() {
    return lock_count_data_;
  }

  // Member offsets below are for use by compiled/assembly code.

  static constexpr size_t LockCountDataOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, lock_count_data_);
  }

  static constexpr size_t LinkOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, link_);
  }

  static constexpr size_t MethodOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, method_);
  }

  static constexpr size_t DexPCOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_);
  }

  static constexpr size_t NumberOfVRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, number_of_vregs_);
  }

  static constexpr size_t VRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, vregs_);
  }

  static constexpr size_t ResultRegisterOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, result_register_);
  }

  static constexpr size_t DexPCPtrOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_ptr_);
  }

  static constexpr size_t DexInstructionsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_instructions_);
  }

  static constexpr size_t CachedHotnessCountdownOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, cached_hotness_countdown_);
  }

  static constexpr size_t HotnessCountdownOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, hotness_countdown_);
  }

  // Create ShadowFrame for interpreter using provided memory.
  // `memory` must be at least ComputeSize(num_vregs) bytes.
  static ShadowFrame* CreateShadowFrameImpl(uint32_t num_vregs,
                                            ShadowFrame* link,
                                            ArtMethod* method,
                                            uint32_t dex_pc,
                                            void* memory) {
    return new (memory) ShadowFrame(num_vregs, link, method, dex_pc, true);
  }

  const uint16_t* GetDexPCPtr() {
    return dex_pc_ptr_;
  }

  void SetDexPCPtr(uint16_t* dex_pc_ptr) {
    dex_pc_ptr_ = dex_pc_ptr;
  }

  JValue* GetResultRegister() {
    return result_register_;
  }

  bool NeedsNotifyPop() const {
    return needs_notify_pop_;
  }

  void SetNotifyPop(bool notify) {
    needs_notify_pop_ = notify;
  }

 private:
  ShadowFrame(uint32_t num_vregs, ShadowFrame* link, ArtMethod* method,
              uint32_t dex_pc, bool has_reference_array)
      : link_(link),
        method_(method),
        result_register_(nullptr),
        dex_pc_ptr_(nullptr),
        dex_instructions_(nullptr),
        number_of_vregs_(num_vregs),
        dex_pc_(dex_pc),
        cached_hotness_countdown_(0),
        hotness_countdown_(0),
        needs_notify_pop_(0) {
    // TODO(iam): Remove this parameter, it's an artifact of portable removal
    DCHECK(has_reference_array);
    if (has_reference_array) {
      // Zero both the vreg half and the reference half of the trailing array.
      memset(vregs_, 0, num_vregs * (sizeof(uint32_t) + sizeof(StackReference<mirror::Object>)));
    } else {
      memset(vregs_, 0, num_vregs * sizeof(uint32_t));
    }
  }

  // Reference array starts immediately after the NumberOfVRegs() raw vregs.
  const StackReference<mirror::Object>* References() const {
    DCHECK(HasReferenceArray());
    const uint32_t* vreg_end = &vregs_[NumberOfVRegs()];
    return reinterpret_cast<const StackReference<mirror::Object>*>(vreg_end);
  }

  StackReference<mirror::Object>* References() {
    return const_cast<StackReference<mirror::Object>*>(
        const_cast<const ShadowFrame*>(this)->References());
  }

  // Link to previous shadow frame or null.
  ShadowFrame* link_;
  ArtMethod* method_;
  JValue* result_register_;
  const uint16_t* dex_pc_ptr_;
  // Dex instruction base of the code item.
  const uint16_t* dex_instructions_;
  LockCountData lock_count_data_;  // This may contain GC roots when lock counting is active.
  const uint32_t number_of_vregs_;
  uint32_t dex_pc_;
  int16_t cached_hotness_countdown_;
  int16_t hotness_countdown_;
  // TODO Might be worth it to try to bit-pack this into some other field to reduce stack usage.
  // NB alignment requires that this field takes 4 bytes. Only 1 bit is actually ever used.
  bool needs_notify_pop_;

  // This is a two-part array:
  //  - [0..number_of_vregs) holds the raw virtual registers, and each element here is always 4
  //    bytes.
  //  - [number_of_vregs..number_of_vregs*2) holds only reference registers. Each element here is
  //    ptr-sized.
  // In other words when a primitive is stored in vX, the second (reference) part of the array will
  // be null. When a reference is stored in vX, the second (reference) part of the array will be a
  // copy of vX.
  uint32_t vregs_[0];

  DISALLOW_IMPLICIT_CONSTRUCTORS(ShadowFrame);
};
412
413struct ShadowFrameDeleter {
414 inline void operator()(ShadowFrame* frame) {
415 if (frame != nullptr) {
416 frame->~ShadowFrame();
417 }
418 }
419};
420
421} // namespace art
422
423#endif // ART_RUNTIME_INTERPRETER_SHADOW_FRAME_H_