/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_utils.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
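// The visitor walks the method's shorty and, for each parameter, works out whether it lives in a
// spilled GPR, a spilled FPR, or the caller's out-args area, then invokes Visit() with cur_type_
// and GetParamAddress() describing that argument.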
class QuickArgumentVisitor {
  // Size of each spilled GPR.
#ifdef __LP64__
  static constexpr size_t kBytesPerGprSpillLocation = 8;
#else
  static constexpr size_t kBytesPerGprSpillLocation = 4;
#endif
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
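  // Each architecture block below describes the Runtime::kRefAndArgs callee save frame layout and
  // defines the ABI constants that the generic visiting code further down relies on.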
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    callee saves
  // | R3         |    arg3
  // | R2         |    arg2
  // | R1         |    arg1
  // | R0         |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 4;  // FPR spill size is 4 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 44;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 48;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X28        |
  // | :          |
  // | X19        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D15        |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 8;  // FPR spill size is 8 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 144;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 296;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 304;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__mips__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 4;  // FPR spill size is 4 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 64;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | EAX/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 8;  // FPR spill size is 8 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 32;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 8;  // FPR spill size is 8 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 176;  // Frame size.
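  // On x86-64 the argument GPRs are spilled interleaved with callee saves (see the frame diagram
  // above), so the mapping from argument index to spill offset is not a simple multiplication as
  // on the other architectures.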
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * kBytesPerGprSpillLocation);
      case 1: return (1 * kBytesPerGprSpillLocation);
      case 2: return (0 * kBytesPerGprSpillLocation);
      case 3: return (5 * kBytesPerGprSpillLocation);
      case 4: return (6 * kBytesPerGprSpillLocation);
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  static mirror::ArtMethod* GetCallingMethod(mirror::ArtMethod** sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<mirror::ArtMethod**>(previous_sp);
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(mirror::ArtMethod** sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(mirror::ArtMethod** sp, bool is_static,
                       const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
      gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
      is_split_long_or_double_(false) {
    DCHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize,
              Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  byte* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
          return fpr_args_ + (fpr_index_ * kBytesPerFprSpillLocation);
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((kBytesPerGprSpillLocation == 4) || (kBytesPerFprSpillLocation == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

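  // A 64-bit argument split across registers and stack has its low half in the last register
  // spill slot and its high half at the start of the stack arguments; reassemble it here.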
  uint64_t ReadSplitLongParam() const {
    DCHECK(IsSplitLongOrDouble());
    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
    return (low_half & 0xffffffffULL) | (high_half << 32);
  }

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // This implementation doesn't support a reg-spill area for hard float ABI targets such as
    // x86_64 and aarch64. So, for those targets (where 'kQuickSoftFloatAbi' is 'false'):
    // (a) 'stack_args_' should point to the first method argument, and
    // (b) whatever the argument type is, 'stack_index_' must advance on every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      if (!kQuickSoftFloatAbi || kNumQuickGprArgs == 0) {
        stack_index_++;
      }
      if (kNumQuickGprArgs > 0) {
        gpr_index_++;
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
            stack_index_++;
          }
          if (gpr_index_ < kNumQuickGprArgs) {
            gpr_index_++;
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
            } else {
              stack_index_++;
            }
          } else {
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
            }
            stack_index_++;
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            is_split_long_or_double_ = (kBytesPerGprSpillLocation == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            Visit();
            if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
              if (kBytesPerGprSpillLocation == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  gpr_index_++;
                } else if (kQuickSoftFloatAbi) {
                  stack_index_++;
                }
              }
            }
          } else {
            is_split_long_or_double_ = (kBytesPerFprSpillLocation == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs);
            Visit();
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
              if (kBytesPerFprSpillLocation == 4) {
                if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
                  fpr_index_++;
                }
              }
            }
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 private:
  static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
                                             uint32_t shorty_len) {
    if (kQuickSoftFloatAbi) {
      CHECK_EQ(kNumQuickFprArgs, 0U);
      return (kNumQuickGprArgs * kBytesPerGprSpillLocation) + kBytesPerGprSpillLocation /* ArtMethod* */;
    } else {
      // For now, there is no reg-spill area for hard float ABI targets, so return the offset of
      // the first method parameter ('this' for non-static methods).
      return kBytesPerGprSpillLocation;  // Skip Method*.
    }
  }

  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;
  byte* const gpr_args_;  // Address of GPR arguments in callee save frame.
  byte* const fpr_args_;  // Address of FPR arguments in callee save frame.
  byte* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  uint32_t fpr_index_;  // Index into spilled FPRs.
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Visits arguments on the stack placing them into the shadow frame.
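// Each visited argument is copied into the shadow frame's vregs starting at the first argument
// register, with longs and doubles occupying two vregs.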
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
  ++cur_reg_;
}

extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
    MethodHelper mh(method);
    const DexFile::CodeItem* code_item = mh.GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
                                                  method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, mh.IsStatic(), mh.GetShorty(),
                                                      mh.GetShortyLength(),
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
      // Ensure static method's class is initialized.
      SirtRef<mirror::Class> sirt_c(self, method->GetDeclaringClass());
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_c, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
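// The created jobjects are also remembered so that, after the call, the original stack slots can
// be fixed up via FixupReferences() in case a moving GC relocated the referenced objects.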
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
                            uint32_t shorty_len, ScopedObjectAccessUnchecked* soa,
                            std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      val.j = 0;
      break;
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ(*sp, proxy_method) << PrettyMethod(proxy_method);
  self->SetTopOfStack(sp, 0);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
611 MethodHelper proxy_mh(proxy_method);
Brian Carlstromd3633d52013-08-20 21:06:26 -0700612 DCHECK(!proxy_mh.IsStatic()) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700613 std::vector<jvalue> args;
614 BuildQuickArgumentVisitor local_ref_visitor(sp, proxy_mh.IsStatic(), proxy_mh.GetShorty(),
615 proxy_mh.GetShortyLength(), &soa, &args);
Brian Carlstromd3633d52013-08-20 21:06:26 -0700616
Ian Rogers848871b2013-08-05 10:56:33 -0700617 local_ref_visitor.VisitArguments();
Brian Carlstromd3633d52013-08-20 21:06:26 -0700618 DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700619 args.erase(args.begin());
620
621 // Convert proxy method into expected interface method.
Brian Carlstromea46f952013-07-30 01:26:50 -0700622 mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
Brian Carlstromd3633d52013-08-20 21:06:26 -0700623 DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700624 DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
625 jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);
626
627 // All naked Object*s should now be in jobjects, so its safe to go into the main invoke code
628 // that performs allocations.
629 self->EndAssertNoThreadSuspension(old_cause);
630 JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
631 rcvr_jobj, interface_method_jobj, args);
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800632 // Restore references which might have moved.
633 local_ref_visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -0700634 return result.GetJ();
635}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}


// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
  // Start new JNI local reference state
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs)
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  const DexFile* dex_file;
  uint32_t dex_method_idx;
  if (called->IsRuntimeMethod()) {
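    // The called method is the runtime resolution method, so decode the invoke instruction at the
    // caller's dex pc to recover the real target's dex method index and the invoke type.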
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    {
      MethodHelper mh(caller);
      dex_file = &mh.GetDexFile();
      code = mh.GetCodeItem();
    }
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid used uninitialized warnings.
        invoke_type = kDirect;
        is_range = false;
    }
    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();

  } else {
    invoke_type = kStatic;
    dex_file = &MethodHelper(called).GetDexFile();
    dex_method_idx = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (called->IsRuntimeMethod()) {
    SirtRef<mirror::Object> sirt_receiver(soa.Self(), virtual_or_interface ? receiver : nullptr);
    called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
    receiver = sirt_receiver.get();
  }
  const void* code = NULL;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }
      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method.
      if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
        caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
      } else {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK(&MethodHelper(caller).GetDexFile() == dex_file);
        uint32_t method_index =
            MethodHelper(called).FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
        if (method_index != DexFile::kDexNoIndex) {
          caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
        }
      }
    }
    // Ensure that the called method's class is initialized.
    SirtRef<mirror::Class> called_class(soa.Self(), called->GetDeclaringClass());
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == NULL, self->IsExceptionPending());
  // Fixup any locally saved objects which may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  *sp = called;
  return code;
}


/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. It also means that we can use kRegistersNeededX to denote
 *    when we have to split things.
 * 3) The only soft-float ABI, ARM, is 32b, so no widening needs to be taken into account for
 *    floats and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code is
 *    necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
 *    the stack.
 * 6) There is only little endian.
 *
 *
 * Actual work is supposed to be done in a delegate of the template type. The interface is as
 * follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR.
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, when the architecture is 32b and aligns 64b values.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushSirt(mirror::Object* ref): Add a reference to the Sirt. This _will_ be called with
 *                                          nullptr, as this might be important for null
 *                                          initialization. Must return the jobject, that is, the
 *                                          reference to the entry in the Sirt (nullptr if
 *                                          necessary).
 *
 */
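// A minimal delegate sketch (illustrative only; the class name is hypothetical, not part of this
// file):
//
//   class CountingDelegate {
//    public:
//     void PushGpr(uintptr_t) {}
//     void PushFpr4(float) {}
//     void PushFpr8(uint64_t) {}
//     void PushStack(uintptr_t) {}
//     uintptr_t PushSirt(mirror::Object*) { return 0; }
//   };
//
// A pure sizing pass can implement the Push* callbacks as no-ops or counters, while a filling
// pass writes through pointers into a previously computed frame; ComputeGenericJniFrameSize
// below plays roughly the first role when used as the delegate.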
template <class T> class BuildGenericJniFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 not using regs, anyways
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildGenericJniFrameStateMachine(T* delegate) : gpr_index_(kNumNativeGprArgs),
                                                           fpr_index_(kNumNativeFprArgs),
                                                           stack_entries_(0),
                                                           delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even
    // iff the next register is even; counting down is just to make the compiler happy...
    CHECK_EQ(kNumNativeGprArgs % 2, 0U);
    CHECK_EQ(kNumNativeFprArgs % 2, 0U);
  }

  virtual ~BuildGenericJniFrameStateMachine() {}

  bool HavePointerGpr() {
    return gpr_index_ > 0;
  }

  void AdvancePointer(void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }


  bool HaveSirtGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceSirt(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t sirtRef = PushSirt(ptr);
    if (HaveSirtGpr()) {
      gpr_index_--;
      PushGpr(sirtRef);
    } else {
      stack_entries_++;
      PushStack(sirtRef);
      gpr_index_ = 0;
    }
  }


  bool HaveIntGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      PushGpr(val);
    } else {
      stack_entries_++;
      PushStack(val);
      gpr_index_ = 0;
    }
  }


  bool HaveLongGpr() {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

  bool LongGprNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
        kAlignLongOnStack &&               // and when it needs alignment
        (gpr_index_ & 1) == 1;             // counter is odd, see constructor
  }

  bool LongStackNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
        kAlignLongOnStack &&               // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;         // counter is odd
  }

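  // AdvanceLong places a 64-bit value either in GPRs (low word first, with an optional padding
  // register when the ABI requires an even register pair) or on the stack (again with optional
  // padding to keep 8-byte alignment).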
  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }


  bool HaveFloatFpr() {
    return fpr_index_ > 0;
  }

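  // Bit-copy a value of type U into a (possibly wider) type V via a union; used below to move
  // float/double bit patterns into integer-typed stack and register slots without conversion.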
  template <typename U, typename V> V convert(U in) {
    CHECK_LE(sizeof(U), sizeof(V));
    union { U u; V v; } tmp;
    tmp.u = in;
    return tmp.v;
  }

  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(convert<float, uint32_t>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiRegistersWidened) {
            PushFpr8(convert<double, uint64_t>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(convert<float, uint64_t>(val));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
          // Need to widen before storing: Note the "double" in the template instantiation.
          PushStack(convert<double, uintptr_t>(val));
        } else {
          PushStack(convert<float, uintptr_t>(val));
        }
        fpr_index_ = 0;
      }
    }
  }


  bool HaveDoubleFpr() {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
        kAlignDoubleOnStack &&               // and when it needs alignment
        (fpr_index_ & 1) == 1;               // counter is odd, see constructor
  }

  bool DoubleStackNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
        kAlignDoubleOnStack &&               // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;           // counter is odd
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t getStackEntries() {
    return stack_entries_;
  }

  uint32_t getNumberOfUsedGprs() {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t getNumberOfUsedFprs() {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushSirt(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return delegate_->PushSirt(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs
  uint32_t fpr_index_;      // Number of free FPRs
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended
  T* delegate_;             // What Push implementation gets called
};

class ComputeGenericJniFrameSize FINAL {
 public:
  ComputeGenericJniFrameSize() : num_sirt_references_(0), num_stack_entries_(0) {}

  uint32_t GetStackSize() {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  void ComputeLayout(mirror::ArtMethod*** m, bool is_static, const char* shorty, uint32_t shorty_len,
                     void* sp, StackIndirectReferenceTable** table, uint32_t* sirt_entries,
                     uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr,
                     void** code_return, size_t* overall_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ComputeAll(is_static, shorty, shorty_len);

    mirror::ArtMethod* method = **m;

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

Andreas Gampe36fea8d2014-03-10 13:37:40 -07001184 // First, fix up the layout of the callee-save frame.
1185 // We have to squeeze in the Sirt, and relocate the method pointer.
1186
1187 // "Free" the slot for the method.
1188 sp8 += kPointerSize;
1189
1190 // Add the Sirt.
Andreas Gampec147b002014-03-06 18:11:06 -08001191 *sirt_entries = num_sirt_references_;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001192 size_t sirt_size = StackIndirectReferenceTable::GetAlignedSirtSize(num_sirt_references_);
1193 sp8 -= sirt_size;
Andreas Gampec147b002014-03-06 18:11:06 -08001194 *table = reinterpret_cast<StackIndirectReferenceTable*>(sp8);
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001195 (*table)->SetNumberOfReferences(num_sirt_references_);
Andreas Gampec147b002014-03-06 18:11:06 -08001196
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001197 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1198 sp8 -= kPointerSize;
1199 uint8_t* method_pointer = sp8;
1200 *(reinterpret_cast<mirror::ArtMethod**>(method_pointer)) = method;
1201 *m = reinterpret_cast<mirror::ArtMethod**>(method_pointer);
1202
1203 // Reference cookie and padding
1204 sp8 -= 8;
1205 // Store Sirt size
1206 *reinterpret_cast<uint32_t*>(sp8) = static_cast<uint32_t>(sirt_size & 0xFFFFFFFF);
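    // The remaining 4 bytes of this 8-byte slot are used for the JNI cookie stored later by
    // artQuickGenericJniTrampoline.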
1207
1208 // Next comes the native call stack.
Andreas Gampec147b002014-03-06 18:11:06 -08001209 sp8 -= GetStackSize();
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001210 // Now align the call stack below. This aligns by 16, as AArch64 seems to require.
Andreas Gampec147b002014-03-06 18:11:06 -08001211 uintptr_t mask = ~0x0F;
1212 sp8 = reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(sp8) & mask);
1213 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
1214
1215    // Put FPRs and GPRs below.
1216    // The assumption is OK right now, as we have soft-float ARM.
1217 size_t fregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeFprArgs;
1218 sp8 -= fregs * sizeof(uintptr_t);
1219 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1220 size_t iregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeGprArgs;
1221 sp8 -= iregs * sizeof(uintptr_t);
1222 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1223
1224 // reserve space for the code pointer
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001225 sp8 -= kPointerSize;
Andreas Gampec147b002014-03-06 18:11:06 -08001226 *code_return = reinterpret_cast<void*>(sp8);
1227
1228 *overall_size = reinterpret_cast<uint8_t*>(sp) - sp8;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001229
1230 // The new SP is stored at the end of the alloca, so it can be immediately popped
1231 sp8 = reinterpret_cast<uint8_t*>(sp) - 5 * KB;
1232 *(reinterpret_cast<uint8_t**>(sp8)) = method_pointer;
Andreas Gampec147b002014-03-06 18:11:06 -08001233 }
1234
1235 void ComputeSirtOffset() { } // nothing to do, static right now
1236
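  // Walks the shorty once with a counting state machine to determine how many Sirt references
  // and native stack entries the call will need; the argument values passed in are dummies,
  // only the counts matter.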
1237 void ComputeAll(bool is_static, const char* shorty, uint32_t shorty_len)
1238 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1239 BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize> sm(this);
1240
1241 // JNIEnv
1242 sm.AdvancePointer(nullptr);
1243
1244 // Class object or this as first argument
1245 sm.AdvanceSirt(reinterpret_cast<mirror::Object*>(0x12345678));
1246
1247 for (uint32_t i = 1; i < shorty_len; ++i) {
1248 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1249 switch (cur_type_) {
1250 case Primitive::kPrimNot:
1251 sm.AdvanceSirt(reinterpret_cast<mirror::Object*>(0x12345678));
1252 break;
1253
1254 case Primitive::kPrimBoolean:
1255 case Primitive::kPrimByte:
1256 case Primitive::kPrimChar:
1257 case Primitive::kPrimShort:
1258 case Primitive::kPrimInt:
1259 sm.AdvanceInt(0);
1260 break;
1261 case Primitive::kPrimFloat:
1262 sm.AdvanceFloat(0);
1263 break;
1264 case Primitive::kPrimDouble:
1265 sm.AdvanceDouble(0);
1266 break;
1267 case Primitive::kPrimLong:
1268 sm.AdvanceLong(0);
1269 break;
1270 default:
1271 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
1272 }
1273 }
1274
1275 num_stack_entries_ = sm.getStackEntries();
1276 }
1277
1278 void PushGpr(uintptr_t /* val */) {
1279 // not optimizing registers, yet
1280 }
1281
1282 void PushFpr4(float /* val */) {
1283 // not optimizing registers, yet
1284 }
1285
1286 void PushFpr8(uint64_t /* val */) {
1287 // not optimizing registers, yet
1288 }
1289
1290 void PushStack(uintptr_t /* val */) {
1291 // counting is already done in the superclass
1292    // Counting is already done in the state machine.
1293
1294 uintptr_t PushSirt(mirror::Object* /* ptr */) {
1295 num_sirt_references_++;
1296 return reinterpret_cast<uintptr_t>(nullptr);
1297 }
1298
1299 private:
1300 uint32_t num_sirt_references_;
1301 uint32_t num_stack_entries_;
1302};
1303
1304// Visits arguments on the stack, placing them into a region lower down the stack for the
1305// benefit of transitioning into native code.
1306class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001307 public:
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001308 BuildGenericJniFrameVisitor(mirror::ArtMethod*** sp, bool is_static, const char* shorty,
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001309 uint32_t shorty_len, Thread* self) :
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001310 QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), sm_(this) {
Andreas Gampec147b002014-03-06 18:11:06 -08001311 ComputeGenericJniFrameSize fsc;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001312 fsc.ComputeLayout(sp, is_static, shorty, shorty_len, *sp, &sirt_, &sirt_expected_refs_,
1313 &cur_stack_arg_, &cur_gpr_reg_, &cur_fpr_reg_, &code_return_,
1314 &alloca_used_size_);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001315 sirt_number_of_references_ = 0;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001316 cur_sirt_entry_ = reinterpret_cast<StackReference<mirror::Object>*>(GetFirstSirtEntry());
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001317
1318    // The JNIEnv is always the first argument.
Andreas Gampec147b002014-03-06 18:11:06 -08001319 sm_.AdvancePointer(self->GetJniEnv());
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001320
1321 if (is_static) {
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001322 sm_.AdvanceSirt((**sp)->GetDeclaringClass());
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001323 }
1324 }
1325
Ian Rogers9758f792014-03-13 09:02:55 -07001326 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001327
Ian Rogers9758f792014-03-13 09:02:55 -07001328 void FinalizeSirt(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001329
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001330 jobject GetFirstSirtEntry() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1331 return reinterpret_cast<jobject>(sirt_->GetStackReference(0));
Andreas Gampec147b002014-03-06 18:11:06 -08001332 }
1333
1334 void PushGpr(uintptr_t val) {
1335 *cur_gpr_reg_ = val;
1336 cur_gpr_reg_++;
1337 }
1338
1339 void PushFpr4(float val) {
1340 *cur_fpr_reg_ = val;
1341 cur_fpr_reg_++;
1342 }
1343
1344 void PushFpr8(uint64_t val) {
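    // The FPR save area is addressed in 32-bit units (cur_fpr_reg_ is a uint32_t*), so a
    // 64-bit value occupies two slots.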
1345 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1346 *tmp = val;
1347 cur_fpr_reg_ += 2;
1348 }
1349
1350 void PushStack(uintptr_t val) {
1351 *cur_stack_arg_ = val;
1352 cur_stack_arg_++;
1353 }
1354
1355 uintptr_t PushSirt(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001356 uintptr_t tmp;
1357 if (ref == nullptr) {
1358 *cur_sirt_entry_ = StackReference<mirror::Object>();
1359 tmp = reinterpret_cast<uintptr_t>(nullptr);
1360 } else {
1361 *cur_sirt_entry_ = StackReference<mirror::Object>::FromMirrorPtr(ref);
1362 tmp = reinterpret_cast<uintptr_t>(cur_sirt_entry_);
1363 }
1364 cur_sirt_entry_++;
Andreas Gampec147b002014-03-06 18:11:06 -08001365 sirt_number_of_references_++;
1366 return tmp;
1367 }
1368
1369 // Size of the part of the alloca that we actually need.
1370 size_t GetAllocaUsedSize() {
1371 return alloca_used_size_;
1372 }
1373
1374 void* GetCodeReturn() {
1375 return code_return_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001376 }
1377
1378 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001379 uint32_t sirt_number_of_references_;
1380 StackReference<mirror::Object>* cur_sirt_entry_;
Andreas Gampec147b002014-03-06 18:11:06 -08001381 StackIndirectReferenceTable* sirt_;
1382 uint32_t sirt_expected_refs_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001383 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001384 uint32_t* cur_fpr_reg_;
1385 uintptr_t* cur_stack_arg_;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001386 // StackReference<mirror::Object>* top_of_sirt_;
Andreas Gampec147b002014-03-06 18:11:06 -08001387 void* code_return_;
1388 size_t alloca_used_size_;
1389
1390 BuildGenericJniFrameStateMachine<BuildGenericJniFrameVisitor> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001391
1392 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1393};
1394
Ian Rogers9758f792014-03-13 09:02:55 -07001395void BuildGenericJniFrameVisitor::Visit() {
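  // Dispatch the current managed argument to the state machine according to its primitive
  // type; references are routed through the Sirt so the native code receives jobjects.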
1396 Primitive::Type type = GetParamPrimitiveType();
1397 switch (type) {
1398 case Primitive::kPrimLong: {
1399 jlong long_arg;
1400 if (IsSplitLongOrDouble()) {
1401 long_arg = ReadSplitLongParam();
1402 } else {
1403 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1404 }
1405 sm_.AdvanceLong(long_arg);
1406 break;
1407 }
1408 case Primitive::kPrimDouble: {
1409 uint64_t double_arg;
1410 if (IsSplitLongOrDouble()) {
1411        // Read the raw bits so that we don't cast to a double.
1412 double_arg = ReadSplitLongParam();
1413 } else {
1414 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1415 }
1416 sm_.AdvanceDouble(double_arg);
1417 break;
1418 }
1419 case Primitive::kPrimNot: {
1420 StackReference<mirror::Object>* stack_ref =
1421 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
1422 sm_.AdvanceSirt(stack_ref->AsMirrorPtr());
1423 break;
1424 }
1425 case Primitive::kPrimFloat:
1426 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1427 break;
1428 case Primitive::kPrimBoolean: // Fall-through.
1429 case Primitive::kPrimByte: // Fall-through.
1430 case Primitive::kPrimChar: // Fall-through.
1431 case Primitive::kPrimShort: // Fall-through.
1432 case Primitive::kPrimInt: // Fall-through.
1433 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1434 break;
1435 case Primitive::kPrimVoid:
1436 LOG(FATAL) << "UNREACHABLE";
1437 break;
1438 }
1439}
1440
1441void BuildGenericJniFrameVisitor::FinalizeSirt(Thread* self) {
1442 // Initialize padding entries.
1443 while (sirt_number_of_references_ < sirt_expected_refs_) {
1444 *cur_sirt_entry_ = StackReference<mirror::Object>();
1445 cur_sirt_entry_++;
1446 sirt_number_of_references_++;
1447 }
1448 sirt_->SetNumberOfReferences(sirt_expected_refs_);
1449 DCHECK_NE(sirt_expected_refs_, 0U);
1450 // Install Sirt.
1451 self->PushSirt(sirt_);
1452}
1453
Andreas Gampe90546832014-03-12 18:07:19 -07001454extern "C" void* artFindNativeMethod();
1455
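// Finishes JNI for a method that returns a reference; handles the synchronized case when a
// lock object is supplied.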
Andreas Gampead615172014-04-04 16:20:13 -07001456uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
1457 if (lock != nullptr) {
1458 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
1459 } else {
1460 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
1461 }
1462}
1463
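// Same as above, but for methods that do not return a reference.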
1464void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
1465 if (lock != nullptr) {
1466 JniMethodEndSynchronized(cookie, lock, self);
1467 } else {
1468 JniMethodEnd(cookie, self);
1469 }
1470}
1471
Andreas Gampec147b002014-03-06 18:11:06 -08001472/*
1473 * Initializes an alloca region assumed to be directly below sp for a native call:
1474 * Creates a Sirt and a native call stack, and fills a mini-stack with values to be pushed
1475 * into registers.
1475 * The final element on the stack is a pointer to the native code.
1476 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001477 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
1478 * We need to fix this, as the Sirt needs to go into the callee-save frame.
1479 *
Andreas Gampec147b002014-03-06 18:11:06 -08001480 * The return of this function denotes:
1481 * 1) How many bytes of the alloca can be released, if the value is non-negative.
1482 * 2) An error, if the value is negative.
1483 */
1484extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, mirror::ArtMethod** sp)
Andreas Gampe2da88232014-02-27 12:26:20 -08001485 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001486 mirror::ArtMethod* called = *sp;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001487 DCHECK(called->IsNative()) << PrettyMethod(called, true);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001488
1489  // Run the visitor.
1490 MethodHelper mh(called);
Andreas Gampec147b002014-03-06 18:11:06 -08001491
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001492 BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), mh.GetShorty(), mh.GetShortyLength(),
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001493 self);
1494 visitor.VisitArguments();
1495 visitor.FinalizeSirt(self);
1496
1497  // Fix up the managed-stack bookkeeping in the Thread.
1498 self->SetTopOfStack(sp, 0);
1499
Ian Rogerse0dcd462014-03-08 15:21:04 -08001500 self->VerifyStack();
1501
Andreas Gampe90546832014-03-12 18:07:19 -07001502 // Start JNI, save the cookie.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001503 uint32_t cookie;
1504 if (called->IsSynchronized()) {
1505 cookie = JniMethodStartSynchronized(visitor.GetFirstSirtEntry(), self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001506 if (self->IsExceptionPending()) {
1507 self->PopSirt();
Andreas Gampec147b002014-03-06 18:11:06 -08001508 // A negative value denotes an error.
1509 return -1;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001510 }
1511 } else {
1512 cookie = JniMethodStart(self);
1513 }
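  // Store the cookie in the 4 bytes directly below the method pointer; this slot was reserved
  // by ComputeLayout as part of the "reference cookie and padding" area.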
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001514 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Ian Rogerse0dcd462014-03-08 15:21:04 -08001515 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001516
Andreas Gampe90546832014-03-12 18:07:19 -07001517 // Retrieve the stored native code.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001518 const void* nativeCode = called->GetNativeMethod();
Andreas Gampe90546832014-03-12 18:07:19 -07001519
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07001520 // There are two cases for the content of nativeCode:
1521 // 1) Pointer to the native function.
1522 // 2) Pointer to the trampoline for native code binding.
1523 // In the second case, we need to execute the binding and continue with the actual native function
1524 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07001525 DCHECK(nativeCode != nullptr);
1526 if (nativeCode == GetJniDlsymLookupStub()) {
1527 nativeCode = artFindNativeMethod();
1528
1529 if (nativeCode == nullptr) {
1530 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07001531
1532 // End JNI, as the assembly will move to deliver the exception.
1533 jobject lock = called->IsSynchronized() ? visitor.GetFirstSirtEntry() : nullptr;
1534 if (mh.GetShorty()[0] == 'L') {
1535 artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
1536 } else {
1537 artQuickGenericJniEndJNINonRef(self, cookie, lock);
1538 }
1539
Andreas Gampe90546832014-03-12 18:07:19 -07001540 return -1;
1541 }
1542 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001543 }
1544
Andreas Gampe90546832014-03-12 18:07:19 -07001545 // Store the native code pointer in the stack at the right location.
Andreas Gampec147b002014-03-06 18:11:06 -08001546 uintptr_t* code_pointer = reinterpret_cast<uintptr_t*>(visitor.GetCodeReturn());
Andreas Gampec147b002014-03-06 18:11:06 -08001547 *code_pointer = reinterpret_cast<uintptr_t>(nativeCode);
1548
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001549  // Of the 5 KB reserved, window_size bytes plus the pointer slot holding the stored SP remain in use.
Andreas Gampe90546832014-03-12 18:07:19 -07001550 size_t window_size = visitor.GetAllocaUsedSize();
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001551 return (5 * KB) - window_size - kPointerSize;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001552}
1553
1554/*
1555 * Is called after the native JNI code. Responsible for cleanup (SIRT, saved state) and
1556 * unlocking.
1557 */
1558extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, mirror::ArtMethod** sp,
1559 jvalue result, uint64_t result_f)
1560 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1561 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
1562 mirror::ArtMethod* called = *sp;
Ian Rogerse0dcd462014-03-08 15:21:04 -08001563 uint32_t cookie = *(sp32 - 1);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001564
Andreas Gampead615172014-04-04 16:20:13 -07001565 jobject lock = nullptr;
1566 if (called->IsSynchronized()) {
1567 StackIndirectReferenceTable* table =
1568 reinterpret_cast<StackIndirectReferenceTable*>(
1569 reinterpret_cast<uint8_t*>(sp) + kPointerSize);
1570 lock = reinterpret_cast<jobject>(table->GetStackReference(0));
1571 }
1572
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001573 MethodHelper mh(called);
1574 char return_shorty_char = mh.GetShorty()[0];
1575
1576 if (return_shorty_char == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001577 return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001578 } else {
Andreas Gampead615172014-04-04 16:20:13 -07001579 artQuickGenericJniEndJNINonRef(self, cookie, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001580
1581 switch (return_shorty_char) {
1582 case 'F': // Fall-through.
1583 case 'D':
1584 return result_f;
1585 case 'Z':
1586 return result.z;
1587 case 'B':
1588 return result.b;
1589 case 'C':
1590 return result.c;
1591 case 'S':
1592 return result.s;
1593 case 'I':
1594 return result.i;
1595 case 'J':
1596 return result.j;
1597 case 'V':
1598 return 0;
1599 default:
1600 LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
1601 return 0;
1602 }
1603 }
Andreas Gampe2da88232014-02-27 12:26:20 -08001604}
1605
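// Common resolution and dispatch path shared by the invoke trampolines below: resolves the
// target method (taking the slow path when the fast lookup fails) and returns its quick code
// entry point packed together with the method pointer (32-bit targets only; 64-bit is
// unimplemented here).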
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001606template<InvokeType type, bool access_check>
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001607static uint64_t artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
1608 mirror::ArtMethod* caller_method,
1609 Thread* self, mirror::ArtMethod** sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001610
1611template<InvokeType type, bool access_check>
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001612static uint64_t artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
1613 mirror::ArtMethod* caller_method,
1614 Thread* self, mirror::ArtMethod** sp) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001615 mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
1616 type);
1617 if (UNLIKELY(method == nullptr)) {
1618 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
1619 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1620 uint32_t shorty_len;
1621 const char* shorty =
1622 dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
1623 {
1624 // Remember the args in case a GC happens in FindMethodFromCode.
1625 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
1626 RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
1627 visitor.VisitArguments();
1628 method = FindMethodFromCode<type, access_check>(method_idx, this_object, caller_method, self);
1629 visitor.FixupReferences();
1630 }
1631
1632    if (UNLIKELY(method == nullptr)) {
1633 CHECK(self->IsExceptionPending());
1634      return 0;  // Failure.
1635 }
1636 }
1637 DCHECK(!self->IsExceptionPending());
1638 const void* code = method->GetEntryPointFromQuickCompiledCode();
1639
1640 // When we return, the caller will branch to this address, so it had better not be 0!
1641 DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
1642 << MethodHelper(method).GetDexFile().GetLocation();
1643#ifdef __LP64__
1644 UNIMPLEMENTED(FATAL);
1645 return 0;
1646#else
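  // On 32-bit targets, pack the code pointer into the high 32 bits and the method pointer
  // into the low 32 bits; the assembly caller uses both halves when branching to the code.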
1647 uint32_t method_uint = reinterpret_cast<uint32_t>(method);
1648 uint64_t code_uint = reinterpret_cast<uint32_t>(code);
1649 uint64_t result = ((code_uint << 32) | method_uint);
1650 return result;
1651#endif
1652}
1653
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001654// Explicit artInvokeCommon template function declarations to please analysis tool.
1655#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
1656 template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) \
Nicolas Geoffray0bf8b9c2014-04-04 09:55:27 +01001657 uint64_t artInvokeCommon<type, access_check>(uint32_t method_idx, \
1658 mirror::Object* this_object, \
1659 mirror::ArtMethod* caller_method, \
1660 Thread* self, mirror::ArtMethod** sp) \
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001661
1662EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
1663EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
1664EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
1665EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
1666EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
1667EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
1668EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
1669EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
1670EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
1671EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
1672#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
1673
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001674
1675// See comments in runtime_support_asm.S
1676extern "C" uint64_t artInvokeInterfaceTrampolineWithAccessCheck(uint32_t method_idx,
1677 mirror::Object* this_object,
1678 mirror::ArtMethod* caller_method,
1679 Thread* self,
1680 mirror::ArtMethod** sp)
1681 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1682 return artInvokeCommon<kInterface, true>(method_idx, this_object, caller_method, self, sp);
1683}
1684
1685
1686extern "C" uint64_t artInvokeDirectTrampolineWithAccessCheck(uint32_t method_idx,
1687 mirror::Object* this_object,
1688 mirror::ArtMethod* caller_method,
1689 Thread* self,
1690 mirror::ArtMethod** sp)
1691 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1692 return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, self, sp);
1693}
1694
1695extern "C" uint64_t artInvokeStaticTrampolineWithAccessCheck(uint32_t method_idx,
1696 mirror::Object* this_object,
1697 mirror::ArtMethod* caller_method,
1698 Thread* self,
1699 mirror::ArtMethod** sp)
1700 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1701 return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, self, sp);
1702}
1703
1704extern "C" uint64_t artInvokeSuperTrampolineWithAccessCheck(uint32_t method_idx,
1705 mirror::Object* this_object,
1706 mirror::ArtMethod* caller_method,
1707 Thread* self,
1708 mirror::ArtMethod** sp)
1709 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1710 return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, self, sp);
1711}
1712
1713extern "C" uint64_t artInvokeVirtualTrampolineWithAccessCheck(uint32_t method_idx,
1714 mirror::Object* this_object,
1715 mirror::ArtMethod* caller_method,
1716 Thread* self,
1717 mirror::ArtMethod** sp)
1718 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1719 return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, self, sp);
1720}
1721
1722// Determine target of interface dispatch. This object is known non-null.
1723extern "C" uint64_t artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
1724 mirror::Object* this_object,
1725 mirror::ArtMethod* caller_method,
1726 Thread* self, mirror::ArtMethod** sp)
1727 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1728 mirror::ArtMethod* method;
1729 if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
1730 method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
1731    if (UNLIKELY(method == nullptr)) {
1732 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
1733 ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
1734 caller_method);
1735 return 0; // Failure.
1736 }
1737 } else {
1738 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
1739 DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
1740 // Determine method index from calling dex instruction.
1741#if defined(__arm__)
1742 // On entry the stack pointed by sp is:
1743 // | argN | |
1744 // | ... | |
1745 // | arg4 | |
1746 // | arg3 spill | | Caller's frame
1747 // | arg2 spill | |
1748 // | arg1 spill | |
1749 // | Method* | ---
1750 // | LR |
1751 // | ... | callee saves
1752 // | R3 | arg3
1753 // | R2 | arg2
1754 // | R1 | arg1
1755 // | R0 |
1756 // | Method* | <- sp
1757 DCHECK_EQ(48U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
1758 uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + kPointerSize);
1759 uintptr_t caller_pc = regs[10];
1760#elif defined(__i386__)
1761 // On entry the stack pointed by sp is:
1762 // | argN | |
1763 // | ... | |
1764 // | arg4 | |
1765 // | arg3 spill | | Caller's frame
1766 // | arg2 spill | |
1767 // | arg1 spill | |
1768 // | Method* | ---
1769 // | Return |
1770 // | EBP,ESI,EDI | callee saves
1771 // | EBX | arg3
1772 // | EDX | arg2
1773 // | ECX | arg1
1774 // | EAX/Method* | <- sp
1775 DCHECK_EQ(32U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
1776 uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
1777 uintptr_t caller_pc = regs[7];
1778#elif defined(__mips__)
1779 // On entry the stack pointed by sp is:
1780 // | argN | |
1781 // | ... | |
1782 // | arg4 | |
1783 // | arg3 spill | | Caller's frame
1784 // | arg2 spill | |
1785 // | arg1 spill | |
1786 // | Method* | ---
1787 // | RA |
1788 // | ... | callee saves
1789 // | A3 | arg3
1790 // | A2 | arg2
1791 // | A1 | arg1
1792 // | A0/Method* | <- sp
1793 DCHECK_EQ(64U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
1794 uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
1795 uintptr_t caller_pc = regs[15];
1796#else
1797 UNIMPLEMENTED(FATAL);
1798 uintptr_t caller_pc = 0;
1799#endif
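    // Decode the invoke-interface instruction at the caller's dex pc to recover the method
    // index that was actually being invoked.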
1800 uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
1801 const DexFile::CodeItem* code = MethodHelper(caller_method).GetCodeItem();
1802 CHECK_LT(dex_pc, code->insns_size_in_code_units_);
1803 const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
1804 Instruction::Code instr_code = instr->Opcode();
1805 CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
1806 instr_code == Instruction::INVOKE_INTERFACE_RANGE)
1807 << "Unexpected call into interface trampoline: " << instr->DumpString(NULL);
1808 uint32_t dex_method_idx;
1809 if (instr_code == Instruction::INVOKE_INTERFACE) {
1810 dex_method_idx = instr->VRegB_35c();
1811 } else {
1812 DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
1813 dex_method_idx = instr->VRegB_3rc();
1814 }
1815
1816 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1817 uint32_t shorty_len;
1818 const char* shorty =
1819 dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
1820 {
1821 // Remember the args in case a GC happens in FindMethodFromCode.
1822 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
1823 RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
1824 visitor.VisitArguments();
1825 method = FindMethodFromCode<kInterface, false>(dex_method_idx, this_object, caller_method,
1826 self);
1827 visitor.FixupReferences();
1828 }
1829
1830 if (UNLIKELY(method == nullptr)) {
1831 CHECK(self->IsExceptionPending());
1832 return 0; // Failure.
1833 }
1834 }
1835 const void* code = method->GetEntryPointFromQuickCompiledCode();
1836
1837 // When we return, the caller will branch to this address, so it had better not be 0!
1838 DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
1839 << MethodHelper(method).GetDexFile().GetLocation();
1840#ifdef __LP64__
1841 UNIMPLEMENTED(FATAL);
1842 return 0;
1843#else
1844 uint32_t method_uint = reinterpret_cast<uint32_t>(method);
1845 uint64_t code_uint = reinterpret_cast<uint32_t>(code);
1846 uint64_t result = ((code_uint << 32) | method_uint);
1847 return result;
1848#endif
1849}
1850
Ian Rogers848871b2013-08-05 10:56:33 -07001851} // namespace art