/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "gc/accounting/card_table-inl.h"
#include "instruction_set.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    callee saves
  // | R3         |    arg3
  // | R2         |    arg2
  // | R1         |    arg1
  // | R0         |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 44;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X28        |
  // | :          |
  // | X19        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D15        |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 144;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 296;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | EAX/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif
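
  // Worked example (illustration only): on ARM, with the layout above,
  // gpr_args_ = sp + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = sp + 8,
  // just above the Method* and r0 padding slots, so the first GPR argument
  // (r1, gpr_index 0) is read from sp + 8 + GprIndexToGprOffset(0) = sp + 8.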

 public:
  static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
      gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
      is_split_long_or_double_(false) {}

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  byte* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    DCHECK(IsSplitLongOrDouble());
    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
    return (low_half & 0xffffffffULL) | (high_half << 32);
  }
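
  // Worked example for ReadSplitLongParam above (illustration only): on a
  // 32-bit target the low half of a split long sits in the last argument GPR
  // and the high half in the first stack slot, so with low_half = 0xDDCCBBAA
  // and high_half = 0x11223344 this reassembles 0x11223344DDCCBBAA.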

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // This implementation doesn't support a reg-spill area for hard float
    // ABI targets such as x86_64 and aarch64. So, for those targets whose
    // 'kQuickSoftFloatAbi' is 'false':
    // (a) 'stack_args_' should point to the method's first argument, and
    // (b) whatever the argument type is, 'stack_index_' should be moved
    //     forward with every visit.
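    // Note: the shorty encodes the signature one character per type, return
    // type first, with every reference type collapsed to 'L'. For example,
    // long f(int, Object, double) has the shorty "JILD"; the loop below
    // starts at index 1 to skip the return type.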
    gpr_index_ = 0;
    fpr_index_ = 0;
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      if (!kQuickSoftFloatAbi || kNumQuickGprArgs == 0) {
        stack_index_++;
      }
      if (kNumQuickGprArgs > 0) {
        gpr_index_++;
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
            stack_index_++;
          }
          if (gpr_index_ < kNumQuickGprArgs) {
            gpr_index_++;
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
            } else {
              stack_index_++;
            }
          } else {
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
            }
            stack_index_++;
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            Visit();
            if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  gpr_index_++;
                } else if (kQuickSoftFloatAbi) {
                  stack_index_++;
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs);
            Visit();
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
                  fpr_index_++;
                }
              }
            }
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 private:
  static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
                                             uint32_t shorty_len) {
    if (kQuickSoftFloatAbi) {
      CHECK_EQ(kNumQuickFprArgs, 0U);
      return (kNumQuickGprArgs * GetBytesPerGprSpillLocation(kRuntimeISA))
          + sizeof(StackReference<mirror::ArtMethod>) /* StackReference<ArtMethod> */;
    } else {
      // For now, there is no reg-spill area for the targets with
      // hard float ABI. So, the offset pointing to the method's first
      // parameter ('this' for non-static methods) should be returned.
      return sizeof(StackReference<mirror::ArtMethod>);  // Skip StackReference<ArtMethod>.
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  byte* const gpr_args_;  // Address of GPR arguments in callee save frame.
  byte* const fpr_args_;  // Address of FPR arguments in callee save frame.
  byte* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  uint32_t fpr_index_;  // Index into spilled FPRs.
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
                               size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
  ++cur_reg_;
}

extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    // No last shadow coming from quick.
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitialized()) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(method));
    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object*
// arguments are converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                            const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      val.j = 0;
      break;
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
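// For illustration (not part of the runtime): a Java-level call such as
//   Foo proxy = (Foo) Proxy.newProxyInstance(loader, new Class[] { Foo.class }, handler);
//   proxy.bar(42);
// enters here through the proxy stub, and downstream of this handler results in
//   handler.invoke(proxy, /* Method for Foo.bar */, new Object[] { Integer.valueOf(42) })
// once the primitive arguments have been boxed.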
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method);
  self->SetTopOfStack(sp, 0);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place arguments into the args vector and remove the receiver.
  mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy();
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len,
                               ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  const DexFile* dex_file;
  uint32_t dex_method_idx;
  if (called->IsRuntimeMethod()) {
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid "used uninitialized" warnings.
        invoke_type = kDirect;
        is_range = false;
    }
    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    dex_file = called->GetDexFile();
    dex_method_idx = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (UNLIKELY(called->IsRuntimeMethod())) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    called = linker->ResolveMethod(self, dex_method_idx, &caller, invoke_type);
  }
  const void* code = NULL;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;

      mirror::ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method.
      if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
        caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
      } else {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK_EQ(caller->GetDexFile(), dex_file);
        StackHandleScope<1> hs(self);
        MethodHelper mh(hs.NewHandle(called));
        uint32_t method_index = mh.FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
        if (method_index != DexFile::kDexNoIndex) {
          caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
        }
      }
    }
    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == NULL, self->IsExceptionPending());
  // Fixup any locally saved objects which may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  sp->Assign(called);
  return code;
}

/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
 *    when we have to split things.
 * 3) The only soft-float ABI, ARM, is 32b, so no widening needs to be taken into account for
 *    floats and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
 *    is necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
 *    the stack.
 * 6) There is only little endian.
 *
 *
 * Actual work is supposed to be done in a delegate of the template type. The interface is as
 * follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR.
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, when the architecture is 32b and aligns 64b.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ have
 *                                            nullptr, as this might be important for null
 *                                            initialization. Must return the jobject, that is,
 *                                            the reference to the entry in the HandleScope
 *                                            (nullptr if necessary).
 *
 */
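// A minimal sketch (illustration only, not part of the runtime) of a delegate
// satisfying the interface above; the class name and storage are hypothetical:
//
//   class CountingNativeFrameDelegate {
//    public:
//     void PushGpr(uintptr_t val) { gprs_.push_back(val); }
//     void PushFpr4(float val) { fprs_.push_back(bit_cast<float, uint64_t>(val)); }
//     void PushFpr8(uint64_t val) { fprs_.push_back(val); }
//     void PushStack(uintptr_t val) { stack_.push_back(val); }
//     uintptr_t PushHandle(mirror::Object* ref)
//         SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//       // A real delegate stores 'ref' in a HandleScope slot and returns a
//       // reference to that entry; the raw pointer here is a placeholder.
//       return reinterpret_cast<uintptr_t>(ref);
//     }
//    private:
//     std::vector<uintptr_t> gprs_, stack_;
//     std::vector<uint64_t> fprs_;
//   };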
template<class T> class BuildNativeCallFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs (dummy).
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs (dummy).

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 not using regs, anyway.
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

Andreas Gampec147b002014-03-06 18:11:06 -0800934 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -0700935 explicit BuildNativeCallFrameStateMachine(T* delegate)
936 : gpr_index_(kNumNativeGprArgs),
937 fpr_index_(kNumNativeFprArgs),
938 stack_entries_(0),
939 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -0800940 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
941 // the next register is even; counting down is just to make the compiler happy...
942 CHECK_EQ(kNumNativeGprArgs % 2, 0U);
943 CHECK_EQ(kNumNativeFprArgs % 2, 0U);
944 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800945
Andreas Gampec200a4a2014-06-16 18:39:09 -0700946 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -0800947
948 bool HavePointerGpr() {
949 return gpr_index_ > 0;
950 }
951
Andreas Gampec200a4a2014-06-16 18:39:09 -0700952 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -0800953 if (HavePointerGpr()) {
954 gpr_index_--;
955 PushGpr(reinterpret_cast<uintptr_t>(val));
956 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -0700957 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -0800958 PushStack(reinterpret_cast<uintptr_t>(val));
959 gpr_index_ = 0;
960 }
961 }
962
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700963 bool HaveHandleScopeGpr() {
Andreas Gampec147b002014-03-06 18:11:06 -0800964 return gpr_index_ > 0;
965 }
966
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700967 void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
968 uintptr_t handle = PushHandle(ptr);
969 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -0800970 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700971 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -0800972 } else {
973 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700974 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -0800975 gpr_index_ = 0;
976 }
977 }
978
Andreas Gampec147b002014-03-06 18:11:06 -0800979 bool HaveIntGpr() {
980 return gpr_index_ > 0;
981 }
982
983 void AdvanceInt(uint32_t val) {
984 if (HaveIntGpr()) {
985 gpr_index_--;
986 PushGpr(val);
987 } else {
988 stack_entries_++;
989 PushStack(val);
990 gpr_index_ = 0;
991 }
992 }
993
Andreas Gampec147b002014-03-06 18:11:06 -0800994 bool HaveLongGpr() {
995 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
996 }
997
998 bool LongGprNeedsPadding() {
999 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1000 kAlignLongOnStack && // and when it needs alignment
1001 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1002 }
1003
1004 bool LongStackNeedsPadding() {
1005 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1006 kAlignLongOnStack && // and when it needs 8B alignment
1007 (stack_entries_ & 1) == 1; // counter is odd
1008 }

  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }
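
  // Worked example (illustrative): on a 32-bit ABI with kRegistersNeededForLong == 2 and
  // kAlignLongOnStack == true, a long arriving with gpr_index_ == 3 (odd counter) triggers
  // LongGprNeedsPadding(): a zero GPR is pushed first, then the low and high words, leaving
  // gpr_index_ == 0 and the value in the even/odd register pair the ABI requires.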

  bool HaveFloatFpr() {
    return fpr_index_ > 0;
  }

  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(bit_cast<float, uint32_t>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiRegistersWidened) {
            PushFpr8(bit_cast<double, uint64_t>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(bit_cast<float, uint64_t>(val));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
          // Need to widen before storing: note the "double" in the template instantiation.
          // Note: we need to jump through those hoops to make the compiler happy.
          DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
          PushStack(static_cast<uintptr_t>(bit_cast<double, uint64_t>(val)));
        } else {
          PushStack(bit_cast<float, uintptr_t>(val));
        }
        fpr_index_ = 0;
      }
    }
  }
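
  // Note (illustrative): on a hypothetical ABI with kRegistersNeededForDouble == 1 and
  // kMultiRegistersWidened == true, a float is widened to double before its bits are stored
  // (hence the bit_cast<double, uint64_t> above); without widening, just the raw float bits
  // are pushed and only the low 32 bits of the slot are meaningful.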

  bool HaveDoubleFpr() {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
        kAlignDoubleOnStack &&               // and when it needs alignment
        (fpr_index_ & 1) == 1;               // counter is odd, see constructor
  }

  bool DoubleStackNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
        kAlignDoubleOnStack &&               // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;           // counter is odd
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t getStackEntries() {
    return stack_entries_;
  }

  uint32_t getNumberOfUsedGprs() {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t getNumberOfUsedFprs() {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return delegate_->PushHandle(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs.
  uint32_t fpr_index_;      // Number of free FPRs.
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended.
  T* delegate_;             // What Push implementation gets called.
};
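
// Usage sketch (illustrative): the delegate type T supplies PushGpr/PushFpr4/PushFpr8/PushStack
// and PushHandle; the state machine is then driven once per argument, for example:
//   BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(&delegate);
//   sm.AdvancePointer(env);   // JNIEnv*
//   sm.AdvanceInt(i);         // a jint argument
//   sm.AdvanceDouble(bits);   // a jdouble argument, passed as raw bits
// ComputeNativeCallFrameSize below only counts what would be pushed; FillNativeCall writes the
// actual values.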

// Computes the sizes of the register stacks and the call stack area. Handling of references can
// be extended in subclasses.
//
// To handle native pointers, use "L" in the shorty for an object reference, which simulates
// them with handles.
class ComputeNativeCallFrameSize {
 public:
  ComputeNativeCallFrameSize() : num_stack_entries_(0) {}

  virtual ~ComputeNativeCallFrameSize() {}

  uint32_t GetStackSize() {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  uint8_t* LayoutCallStack(uint8_t* sp8) {
    sp8 -= GetStackSize();
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
    return sp8;
  }

  uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr) {
    // Assumption is OK right now, as we have soft-float arm.
    size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
    return sp8;
  }

  uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
                            uint32_t** start_fpr) {
    // Native call stack.
    sp8 = LayoutCallStack(sp8);
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // Put fprs and gprs below.
    sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {}

  void Walk(const char* shorty, uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);

    WalkHeader(&sm);

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          sm.AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
      }
    }

    num_stack_entries_ = sm.getStackEntries();
  }
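
  // Example (illustrative): for shorty "VIJD" (void return; int, long and double arguments),
  // Walk() performs AdvanceInt(0), AdvanceLong(0) and AdvanceDouble(0) after the WalkHeader()
  // prologue; the dummy values are discarded and only the final stack-entry count is kept.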

  void PushGpr(uintptr_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr4(float /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr8(uint64_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushStack(uintptr_t /* val */) {
    // Counting is already done in the state machine.
  }

  virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 protected:
  uint32_t num_stack_entries_;
};

class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
 public:
  ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}

  // Lays out the callee-save frame. Assumes that the incoming, not-yet-fixed-up frame
  // corresponding to RefsAndArgs is at *m = sp. Will update *m to point to the bottom of the
  // save frame.
  //
  // Note: assumes Walk() has been run beforehand, so num_handle_scope_references_ is final.
  void LayoutCalleeSaveFrame(StackReference<mirror::ArtMethod>** m, void* sp, HandleScope** table,
                             uint32_t* handle_scope_entries)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    mirror::ArtMethod* method = (*m)->AsMirrorPtr();

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.

    // "Free" the slot for the method.
    sp8 += kPointerSize;  // In the callee-save frame we use a full pointer.

    // Under the callee saves put handle scope and new method stack reference.
    *handle_scope_entries = num_handle_scope_references_;

    size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
    size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>);

    sp8 -= scope_and_method;
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(
        reinterpret_cast<uintptr_t>(sp8), kStackAlignment));

    uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>);
    *table = reinterpret_cast<HandleScope*>(sp8_table);
    (*table)->SetNumberOfReferences(num_handle_scope_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    uint8_t* method_pointer = sp8;
    StackReference<mirror::ArtMethod>* new_method_ref =
        reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer);
    new_method_ref->Assign(method);
    *m = new_method_ref;
  }
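
  // Resulting layout, from higher to lower addresses (sketch; padding depends on kStackAlignment):
  //   | callee-save frame contents (old method slot freed)    |
  //   | HandleScope with num_handle_scope_references_ entries | <- *table
  //   | StackReference<ArtMethod> (relocated method pointer)  | <- *m, new aligned bottom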

  // Adds space for the cookie. Note: may leave the stack unaligned.
  void LayoutCookie(uint8_t** sp) {
    // Reference cookie and padding.
    *sp -= 8;
  }

  // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
  // Returns the new bottom. Note: this may be unaligned.
  uint8_t* LayoutJNISaveFrame(StackReference<mirror::ArtMethod>** m, void* sp, HandleScope** table,
                              uint32_t* handle_scope_entries)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.
    LayoutCalleeSaveFrame(m, sp, table, handle_scope_entries);

    // The bottom of the callee-save frame is now where the method is, *m.
    uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);

    // Add space for the cookie.
    LayoutCookie(&sp8);

    return sp8;
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  uint8_t* ComputeLayout(StackReference<mirror::ArtMethod>** m, bool is_static, const char* shorty,
                         uint32_t shorty_len, HandleScope** table, uint32_t* handle_scope_entries,
                         uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Walk(shorty, shorty_len);

    // JNI part.
    uint8_t* sp8 = LayoutJNISaveFrame(m, reinterpret_cast<void*>(*m), table, handle_scope_entries);

    sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;

  // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
  void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  uint32_t num_handle_scope_references_;
};

uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
  num_handle_scope_references_++;
  return reinterpret_cast<uintptr_t>(nullptr);
}

void ComputeGenericJniFrameSize::WalkHeader(
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
  // JNIEnv*.
  sm->AdvancePointer(nullptr);

  // Class object or "this" as first argument.
  sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
}
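
// Example (illustrative): for a static native method with shorty "I" (no reference arguments),
// the walk reserves exactly one handle-scope slot, the jclass added by WalkHeader() above, so
// num_handle_scope_references_ ends up as 1; each "L" in the shorty would add one more slot.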

// Class to push values to three separate regions. Used to fill the native call part. Adheres to
// the template requirements of BuildNativeCallFrameStateMachine.
class FillNativeCall {
 public:
  FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
      cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}

  virtual ~FillNativeCall() {}

  void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
    cur_gpr_reg_ = gpr_regs;
    cur_fpr_reg_ = fpr_regs;
    cur_stack_arg_ = stack_args;
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    // Store the raw bits; a plain assignment would convert numerically.
    *cur_fpr_reg_ = bit_cast<float, uint32_t>(val);
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

  virtual uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
    return 0U;
  }

 private:
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
};

// Visits arguments on the stack, placing them into a region lower down the stack for the benefit
// of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(StackReference<mirror::ArtMethod>** sp, bool is_static,
                              const char* shorty, uint32_t shorty_len, Thread* self)
      : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
        jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
    ComputeGenericJniFrameSize fsc;
    uintptr_t* start_gpr_reg;
    uint32_t* start_fpr_reg;
    uintptr_t* start_stack_arg;
    uint32_t handle_scope_entries;
    bottom_of_used_area_ = fsc.ComputeLayout(sp, is_static, shorty, shorty_len, &handle_scope_,
                                             &handle_scope_entries, &start_stack_arg,
                                             &start_gpr_reg, &start_fpr_reg);

    handle_scope_->SetNumberOfReferences(handle_scope_entries);
    jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);

    // The JNI environment is always the first argument.
    sm_.AdvancePointer(self->GetJniEnv());

    if (is_static) {
      sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass());
    }
  }

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  StackReference<mirror::Object>* GetFirstHandleScopeEntry()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).GetReference();
  }

  jobject GetFirstHandleScopeJObject() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).ToJObject();
  }

  void* GetBottomOfUsedArea() {
    return bottom_of_used_area_;
  }

 private:
  // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
  class FillJniCall FINAL : public FillNativeCall {
   public:
    FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
                HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
                                             handle_scope_(handle_scope), cur_entry_(0) {}

    uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

    void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
      FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
      handle_scope_ = scope;
      cur_entry_ = 0U;
    }

    void ResetRemainingScopeSlots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      // Initialize padding entries.
      size_t expected_slots = handle_scope_->NumberOfReferences();
      while (cur_entry_ < expected_slots) {
        handle_scope_->GetHandle(cur_entry_++).Assign(nullptr);
      }
      DCHECK_NE(cur_entry_, 0U);
    }

   private:
    HandleScope* handle_scope_;
    size_t cur_entry_;
  };

  HandleScope* handle_scope_;
  FillJniCall jni_call_;
  void* bottom_of_used_area_;

  BuildNativeCallFrameStateMachine<FillJniCall> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
  uintptr_t tmp;
  Handle<mirror::Object> h = handle_scope_->GetHandle(cur_entry_);
  h.Assign(ref);
  tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
  cur_entry_++;
  return tmp;
}
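
// Note (illustrative): the returned value is what the native code receives as its jobject
// parameter; ToJObject() is assumed to yield the address of the handle-scope slot, or nullptr
// when the assigned reference is null, so argument references need no local-reference table
// entries here.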

void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read into a uint64_t so that we don't cast to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
}

void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
  // Clear out the rest of the scope.
  jni_call_.ResetRemainingScopeSlots();
  // Install HandleScope.
  self->PushHandleScope(handle_scope_);
}

#if defined(__arm__) || defined(__aarch64__)
extern "C" void* artFindNativeMethod();
#else
extern "C" void* artFindNativeMethod(Thread* self);
#endif

uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
  if (lock != nullptr) {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
  } else {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
  }
}

void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
  if (lock != nullptr) {
    JniMethodEndSynchronized(cookie, lock, self);
  } else {
    JniMethodEnd(cookie, self);
  }
}

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a HandleScope and call stack, and fill a mini stack with values to be pushed to
 * registers. The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this, as the handle scope needs to go into the callee-save frame.
 *
 * The return of this function denotes:
 * 1) On success: the bottom of the used alloca area (hi word) and the native code pointer to
 *    invoke (lo word).
 * 2) On failure: a two-word failure value (see GetTwoWordFailureValue()).
 */
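// Illustrative sketch (not the actual stub): the generic JNI assembly roughly does
//   (lo, hi) = artQuickGenericJniTrampoline(self, sp);
//   if (lo == 0) { deliver the pending exception; }
//   new_sp = hi;  // release the alloca down to the returned bottom of the used area
//   call lo;      // invoke the native code, arguments already staged in registers and stack
// and then passes the raw native result to artQuickGenericJniEndTrampoline below.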
extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self,
                                                      StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* called = sp->AsMirrorPtr();
  DCHECK(called->IsNative()) << PrettyMethod(called, true);
  uint32_t shorty_len = 0;
  const char* shorty = called->GetShorty(&shorty_len);

  // Run the visitor.
  BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), shorty, shorty_len, self);
  visitor.VisitArguments();
  visitor.FinalizeHandleScope(self);

  // Fix up managed-stack things in Thread.
  self->SetTopOfStack(sp, 0);

  self->VerifyStack();

  // Start JNI, save the cookie.
  uint32_t cookie;
  if (called->IsSynchronized()) {
    cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
    if (self->IsExceptionPending()) {
      self->PopHandleScope();
      // A failure value denotes an error.
      return GetTwoWordFailureValue();
    }
  } else {
    cookie = JniMethodStart(self);
  }
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  *(sp32 - 1) = cookie;

  // Retrieve the stored native code.
  const void* nativeCode = called->GetNativeMethod();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
#if defined(__arm__) || defined(__aarch64__)
    nativeCode = artFindNativeMethod();
#else
    nativeCode = artFindNativeMethod(self);
#endif

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.

      // End JNI, as the assembly will move to deliver the exception.
      jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
      if (shorty[0] == 'L') {
        artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
      } else {
        artQuickGenericJniEndJNINonRef(self, cookie, lock);
      }

      return GetTwoWordFailureValue();
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

  // Return the native code address (lo) and the bottom of the alloca (hi).
  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
                                reinterpret_cast<uintptr_t>(nativeCode));
}
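
// Note (illustrative): the cookie stored at *(sp32 - 1) above is read back by
// artQuickGenericJniEndTrampoline below, so the JNI transition is closed symmetrically even
// though the two halves run as separate C entrypoints around the native call.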

/*
 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
 * unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  StackReference<mirror::ArtMethod>* sp = self->GetManagedStack()->GetTopQuickFrame();
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  mirror::ArtMethod* called = sp->AsMirrorPtr();
  uint32_t cookie = *(sp32 - 1);

  jobject lock = nullptr;
  if (called->IsSynchronized()) {
    HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
        + sizeof(StackReference<mirror::ArtMethod>));
    lock = table->GetHandle(0).ToJObject();
  }

  char return_shorty_char = called->GetShorty()[0];

  if (return_shorty_char == 'L') {
    return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
  } else {
    artQuickGenericJniEndJNINonRef(self, cookie, lock);

    switch (return_shorty_char) {
      case 'F':  // Fall-through.
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}
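
// Note (illustrative): result and result_f arrive separately because, on hard-float ABIs, a
// floating-point return value lives in an FP register rather than the core result registers;
// passing both lets this function pick the right source based on the return shorty.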

// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo
// value for the method pointer.
//
// It is valid to use this, as at the usage points here (returns from C functions) we are assumed
// to hold the mutator lock (see the SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) annotations).

template<InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                     mirror::ArtMethod* caller_method,
                                     Thread* self, StackReference<mirror::ArtMethod>* sp);

template<InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                     mirror::ArtMethod* caller_method,
                                     Thread* self, StackReference<mirror::ArtMethod>* sp) {
  mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
                                             type);
  if (UNLIKELY(method == nullptr)) {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                          \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)                                    \
  TwoWordReturn artInvokeCommon<type, access_check>(uint32_t method_idx,                  \
                                                    mirror::Object* this_object,          \
                                                    mirror::ArtMethod* caller_method,     \
                                                    Thread* self,                         \
                                                    StackReference<mirror::ArtMethod>* sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S.
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object,
    mirror::ArtMethod* caller_method, Thread* self,
    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object,
    mirror::ArtMethod* caller_method, Thread* self,
    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object,
    mirror::ArtMethod* caller_method, Thread* self,
    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object,
    mirror::ArtMethod* caller_method, Thread* self,
    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object,
    mirror::ArtMethod* caller_method, Thread* self,
    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, self, sp);
}

// Determine the target of interface dispatch. The this_object is known to be non-null.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
                                                      mirror::Object* this_object,
                                                      mirror::ArtMethod* caller_method,
                                                      Thread* self,
                                                      StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* method;
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
    if (UNLIKELY(method == nullptr)) {
      FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
                                                                 caller_method);
      return GetTwoWordFailureValue();  // Failure.
    }
  } else {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());

    // Find the caller PC.
    constexpr size_t pc_offset = GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsAndArgs);
    uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + pc_offset);

    // Map the caller PC to a dex PC.
    uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
    const DexFile::CodeItem* code = caller_method->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
          instr_code == Instruction::INVOKE_INTERFACE_RANGE)
        << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
    uint32_t dex_method_idx;
    if (instr_code == Instruction::INVOKE_INTERFACE) {
      dex_method_idx = instr->VRegB_35c();
    } else {
      DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
      dex_method_idx = instr->VRegB_3rc();
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
                                                   &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}
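
// Example (illustrative): for a caller executing "invoke-interface {v0}, LIface;->f()V" via the
// resolution method, the mapped dex PC lands on an INVOKE_INTERFACE instruction, VRegB_35c()
// yields the dex method index of f, and FindMethodFromCode<kInterface, false> resolves it to
// the concrete implementation in this_object's class.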

}  // namespace art