Ian Rogers848871b2013-08-05 10:56:33 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "callee_save_frame.h"
Dragos Sbirleabd136a22013-08-13 18:07:04 -070018#include "common_throws.h"
Ian Rogers848871b2013-08-05 10:56:33 -070019#include "dex_file-inl.h"
20#include "dex_instruction-inl.h"
Dragos Sbirleabd136a22013-08-13 18:07:04 -070021#include "entrypoints/entrypoint_utils.h"
Ian Rogers83883d72013-10-21 21:07:24 -070022#include "gc/accounting/card_table-inl.h"
Ian Rogers848871b2013-08-05 10:56:33 -070023#include "interpreter/interpreter.h"
24#include "invoke_arg_array_builder.h"
Brian Carlstromea46f952013-07-30 01:26:50 -070025#include "mirror/art_method-inl.h"
Ian Rogers848871b2013-08-05 10:56:33 -070026#include "mirror/class-inl.h"
27#include "mirror/object-inl.h"
28#include "mirror/object_array-inl.h"
29#include "object_utils.h"
30#include "runtime.h"
31
32namespace art {
33
34// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
35class QuickArgumentVisitor {
Ian Rogers936b37f2014-02-14 00:52:24 -080036 // Size of each spilled GPR.
37#ifdef __LP64__
38 static constexpr size_t kBytesPerGprSpillLocation = 8;
39#else
40 static constexpr size_t kBytesPerGprSpillLocation = 4;
41#endif
42 // Number of bytes for each outgoing argument slot in the caller method's frame.
43 static constexpr size_t kBytesStackArgLocation = 4;
Ian Rogers848871b2013-08-05 10:56:33 -070044#if defined(__arm__)
45 // The callee save frame is pointed to by SP.
46 // | argN | |
47 // | ... | |
48 // | arg4 | |
49 // | arg3 spill | | Caller's frame
50 // | arg2 spill | |
51 // | arg1 spill | |
52 // | Method* | ---
53 // | LR |
54 // | ... | callee saves
55 // | R3 | arg3
56 // | R2 | arg2
57 // | R1 | arg1
Ian Rogers936b37f2014-02-14 00:52:24 -080058 // | R0 | padding
Ian Rogers848871b2013-08-05 10:56:33 -070059 // | Method* | <- sp
Andreas Gampebf6b92a2014-03-05 16:11:04 -080060 static constexpr bool kQuickSoftFloatAbi = true; // This is a soft float ABI.
61 static constexpr size_t kNumQuickGprArgs = 3; // 3 arguments passed in GPRs.
62 static constexpr size_t kNumQuickFprArgs = 0; // 0 arguments passed in FPRs.
Ian Rogers936b37f2014-02-14 00:52:24 -080063 static constexpr size_t kBytesPerFprSpillLocation = 4; // FPR spill size is 4 bytes.
64 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0; // Offset of first FPR arg.
65 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 8; // Offset of first GPR arg.
66 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 44; // Offset of return address.
67 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 48; // Frame size.
68 static size_t GprIndexToGprOffset(uint32_t gpr_index) {
69 return gpr_index * kBytesPerGprSpillLocation;
70 }
Ian Rogers848871b2013-08-05 10:56:33 -070071#elif defined(__mips__)
72 // The callee save frame is pointed to by SP.
73 // | argN | |
74 // | ... | |
75 // | arg4 | |
76 // | arg3 spill | | Caller's frame
77 // | arg2 spill | |
78 // | arg1 spill | |
79 // | Method* | ---
80 // | RA |
81 // | ... | callee saves
82 // | A3 | arg3
83 // | A2 | arg2
84 // | A1 | arg1
85 // | A0/Method* | <- sp
Andreas Gampebf6b92a2014-03-05 16:11:04 -080086 static constexpr bool kQuickSoftFloatAbi = true; // This is a soft float ABI.
87 static constexpr size_t kNumQuickGprArgs = 3; // 3 arguments passed in GPRs.
88 static constexpr size_t kNumQuickFprArgs = 0; // 0 arguments passed in FPRs.
Ian Rogers936b37f2014-02-14 00:52:24 -080089 static constexpr size_t kBytesPerFprSpillLocation = 4; // FPR spill size is 4 bytes.
90 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0; // Offset of first FPR arg.
91 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4; // Offset of first GPR arg.
92 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60; // Offset of return address.
93 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 64; // Frame size.
94 static size_t GprIndexToGprOffset(uint32_t gpr_index) {
95 return gpr_index * kBytesPerGprSpillLocation;
96 }
Ian Rogers848871b2013-08-05 10:56:33 -070097#elif defined(__i386__)
98 // The callee save frame is pointed to by SP.
99 // | argN | |
100 // | ... | |
101 // | arg4 | |
102 // | arg3 spill | | Caller's frame
103 // | arg2 spill | |
104 // | arg1 spill | |
105 // | Method* | ---
106 // | Return |
107 // | EBP,ESI,EDI | callee saves
108 // | EBX | arg3
109 // | EDX | arg2
110 // | ECX | arg1
111 // | EAX/Method* | <- sp
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800112 static constexpr bool kQuickSoftFloatAbi = true; // This is a soft float ABI.
113 static constexpr size_t kNumQuickGprArgs = 3; // 3 arguments passed in GPRs.
114 static constexpr size_t kNumQuickFprArgs = 0; // 0 arguments passed in FPRs.
Ian Rogers936b37f2014-02-14 00:52:24 -0800115 static constexpr size_t kBytesPerFprSpillLocation = 8; // FPR spill size is 8 bytes.
116 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0; // Offset of first FPR arg.
117 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4; // Offset of first GPR arg.
118 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28; // Offset of return address.
119 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 32; // Frame size.
120 static size_t GprIndexToGprOffset(uint32_t gpr_index) {
121 return gpr_index * kBytesPerGprSpillLocation;
122 }
Ian Rogersef7d42f2014-01-06 12:55:46 -0800123#elif defined(__x86_64__)
Ian Rogers936b37f2014-02-14 00:52:24 -0800124 // The callee save frame is pointed to by SP.
125 // | argN | |
126 // | ... | |
127 // | reg. arg spills | | Caller's frame
128 // | Method* | ---
129 // | Return |
130 // | R15 | callee save
131 // | R14 | callee save
132 // | R13 | callee save
133 // | R12 | callee save
134 // | R9 | arg5
135 // | R8 | arg4
136 // | RSI/R6 | arg1
137 // | RBP/R5 | callee save
138 // | RBX/R3 | callee save
139 // | RDX/R2 | arg2
140 // | RCX/R1 | arg3
141 // | XMM7 | float arg 8
142 // | XMM6 | float arg 7
143 // | XMM5 | float arg 6
144 // | XMM4 | float arg 5
145 // | XMM3 | float arg 4
146 // | XMM2 | float arg 3
147 // | XMM1 | float arg 2
148 // | XMM0 | float arg 1
149 // | Padding |
150 // | RDI/Method* | <- sp
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800151 static constexpr bool kQuickSoftFloatAbi = false; // This is a hard float ABI.
152 static constexpr size_t kNumQuickGprArgs = 5; // 5 arguments passed in GPRs.
153 static constexpr size_t kNumQuickFprArgs = 8; // 8 arguments passed in FPRs.
Ian Rogers936b37f2014-02-14 00:52:24 -0800154 static constexpr size_t kBytesPerFprSpillLocation = 8; // FPR spill size is 8 bytes.
155 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16; // Offset of first FPR arg.
156 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80; // Offset of first GPR arg.
157 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168; // Offset of return address.
158 static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 176; // Frame size.
159 static size_t GprIndexToGprOffset(uint32_t gpr_index) {
160 switch (gpr_index) {
161 case 0: return (4 * kBytesPerGprSpillLocation);
162 case 1: return (1 * kBytesPerGprSpillLocation);
163 case 2: return (0 * kBytesPerGprSpillLocation);
164 case 3: return (5 * kBytesPerGprSpillLocation);
165 case 4: return (6 * kBytesPerGprSpillLocation);
166 default:
167 LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
168 return 0;
169 }
170 }
Ian Rogers848871b2013-08-05 10:56:33 -0700171#else
172#error "Unsupported architecture"
Ian Rogers848871b2013-08-05 10:56:33 -0700173#endif
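  // Worked example, reading the x86-64 layout above (an illustration only, not extra logic): GPR
  // args are spilled upwards from Gpr1Offset (80) in the order RCX, RDX, RBX, RBP, RSI, R8, R9.
  // The first managed argument is in RSI, so GprIndexToGprOffset(0) == 4 * 8 (frame offset 112);
  // the second is in RDX (slot 1, offset 88) and the third in RCX (slot 0, offset 80), which is
  // exactly the mapping the switch above encodes.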
174
Ian Rogers936b37f2014-02-14 00:52:24 -0800175 public:
176 static mirror::ArtMethod* GetCallingMethod(mirror::ArtMethod** sp)
177 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
178 DCHECK((*sp)->IsCalleeSaveMethod());
179 byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
Brian Carlstromea46f952013-07-30 01:26:50 -0700180 return *reinterpret_cast<mirror::ArtMethod**>(previous_sp);
Ian Rogers848871b2013-08-05 10:56:33 -0700181 }
182
Ian Rogers936b37f2014-02-14 00:52:24 -0800183 // For the given quick ref-and-args callee save frame, return the caller's PC.
184 static uintptr_t GetCallingPc(mirror::ArtMethod** sp)
185 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
186 DCHECK((*sp)->IsCalleeSaveMethod());
187 byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
Ian Rogers848871b2013-08-05 10:56:33 -0700188 return *reinterpret_cast<uintptr_t*>(lr);
189 }
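  // Illustration of the two helpers above using the ARM constants in this file: the caller's
  // Method* is read from sp + kQuickCalleeSaveFrame_RefAndArgs_FrameSize (sp + 48, the slot just
  // above this callee save frame), and the return address from the spilled LR slot at
  // sp + kQuickCalleeSaveFrame_RefAndArgs_LrOffset (sp + 44).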
190
Brian Carlstromea46f952013-07-30 01:26:50 -0700191 QuickArgumentVisitor(mirror::ArtMethod** sp, bool is_static,
Ian Rogers848871b2013-08-05 10:56:33 -0700192 const char* shorty, uint32_t shorty_len)
Ian Rogers936b37f2014-02-14 00:52:24 -0800193 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
194 is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
195 gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
196 fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
197 stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
198 + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
199 gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
200 is_split_long_or_double_(false) {
201 DCHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize,
Ian Rogers848871b2013-08-05 10:56:33 -0700202 Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
203 }
204
205 virtual ~QuickArgumentVisitor() {}
206
207 virtual void Visit() = 0;
208
Ian Rogers936b37f2014-02-14 00:52:24 -0800209 Primitive::Type GetParamPrimitiveType() const {
210 return cur_type_;
Ian Rogers848871b2013-08-05 10:56:33 -0700211 }
212
213 byte* GetParamAddress() const {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800214 if (!kQuickSoftFloatAbi) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800215 Primitive::Type type = GetParamPrimitiveType();
216 if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800217 if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800218 return fpr_args_ + (fpr_index_ * kBytesPerFprSpillLocation);
219 }
220 }
221 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800222 if (gpr_index_ < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800223 return gpr_args_ + GprIndexToGprOffset(gpr_index_);
224 }
225 return stack_args_ + (stack_index_ * kBytesStackArgLocation);
Ian Rogers848871b2013-08-05 10:56:33 -0700226 }
227
228 bool IsSplitLongOrDouble() const {
Ian Rogers936b37f2014-02-14 00:52:24 -0800229 if ((kBytesPerGprSpillLocation == 4) || (kBytesPerFprSpillLocation == 4)) {
230 return is_split_long_or_double_;
231 } else {
232 return false; // An optimization for when GPR and FPRs are 64bit.
233 }
Ian Rogers848871b2013-08-05 10:56:33 -0700234 }
235
Ian Rogers936b37f2014-02-14 00:52:24 -0800236 bool IsParamAReference() const {
Ian Rogers848871b2013-08-05 10:56:33 -0700237 return GetParamPrimitiveType() == Primitive::kPrimNot;
238 }
239
Ian Rogers936b37f2014-02-14 00:52:24 -0800240 bool IsParamALongOrDouble() const {
Ian Rogers848871b2013-08-05 10:56:33 -0700241 Primitive::Type type = GetParamPrimitiveType();
242 return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
243 }
244
245 uint64_t ReadSplitLongParam() const {
246 DCHECK(IsSplitLongOrDouble());
247 uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
248 uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
249 return (low_half & 0xffffffffULL) | (high_half << 32);
250 }
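  // Example of the reassembly above with illustrative values: on a target with 4-byte GPR spill
  // slots, the low half of a split long sits in the last argument GPR and the high half is the
  // first stack argument, so low_half == 0xDDCCBBAA and high_half == 0x11223344 combine to
  // 0x11223344DDCCBBAAULL.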
251
252 void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800253 gpr_index_ = 0;
254 fpr_index_ = 0;
255 stack_index_ = 0;
256 if (!is_static_) { // Handle the "this" argument.
257 cur_type_ = Primitive::kPrimNot;
258 is_split_long_or_double_ = false;
Ian Rogers848871b2013-08-05 10:56:33 -0700259 Visit();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800260 if (kNumQuickGprArgs > 0) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800261 gpr_index_++;
262 } else {
263 stack_index_++;
264 }
Ian Rogers848871b2013-08-05 10:56:33 -0700265 }
Ian Rogers936b37f2014-02-14 00:52:24 -0800266 for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
267 cur_type_ = Primitive::GetType(shorty_[shorty_index]);
268 switch (cur_type_) {
269 case Primitive::kPrimNot:
270 case Primitive::kPrimBoolean:
271 case Primitive::kPrimByte:
272 case Primitive::kPrimChar:
273 case Primitive::kPrimShort:
274 case Primitive::kPrimInt:
275 is_split_long_or_double_ = false;
276 Visit();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800277 if (gpr_index_ < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800278 gpr_index_++;
279 } else {
280 stack_index_++;
281 }
282 break;
283 case Primitive::kPrimFloat:
284 is_split_long_or_double_ = false;
285 Visit();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800286 if (kQuickSoftFloatAbi) {
287 if (gpr_index_ < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800288 gpr_index_++;
289 } else {
290 stack_index_++;
291 }
292 } else {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800293 if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800294 fpr_index_++;
295 } else {
296 stack_index_++;
297 }
298 }
299 break;
300 case Primitive::kPrimDouble:
301 case Primitive::kPrimLong:
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800302 if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800303 is_split_long_or_double_ = (kBytesPerGprSpillLocation == 4) &&
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800304 ((gpr_index_ + 1) == kNumQuickGprArgs);
Ian Rogers936b37f2014-02-14 00:52:24 -0800305 Visit();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800306 if (gpr_index_ < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800307 gpr_index_++;
308 if (kBytesPerGprSpillLocation == 4) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800309 if (gpr_index_ < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800310 gpr_index_++;
311 } else {
312 stack_index_++;
313 }
314 }
315 } else {
316 if (kBytesStackArgLocation == 4) {
317 stack_index_ += 2;
318 } else {
319 CHECK_EQ(kBytesStackArgLocation, 8U);
320 stack_index_++;
321 }
322 }
323 } else {
324 is_split_long_or_double_ = (kBytesPerFprSpillLocation == 4) &&
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800325 ((fpr_index_ + 1) == kNumQuickFprArgs);
Ian Rogers936b37f2014-02-14 00:52:24 -0800326 Visit();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800327 if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800328 fpr_index_++;
329 if (kBytesPerFprSpillLocation == 4) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800330 if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800331 fpr_index_++;
332 } else {
333 stack_index_++;
334 }
335 }
336 } else {
337 if (kBytesStackArgLocation == 4) {
338 stack_index_ += 2;
339 } else {
340 CHECK_EQ(kBytesStackArgLocation, 8U);
341 stack_index_++;
342 }
343 }
344 }
345 break;
346 default:
347 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
348 }
Ian Rogers848871b2013-08-05 10:56:33 -0700349 }
350 }
351
352 private:
Ian Rogers936b37f2014-02-14 00:52:24 -0800353 static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
354 uint32_t shorty_len) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800355 if (kQuickSoftFloatAbi) {
356 CHECK_EQ(kNumQuickFprArgs, 0U);
357 return (kNumQuickGprArgs * kBytesPerGprSpillLocation) + kBytesPerGprSpillLocation /* ArtMethod* */;
Ian Rogers936b37f2014-02-14 00:52:24 -0800358 } else {
359 size_t offset = kBytesPerGprSpillLocation; // Skip Method*.
360 size_t gprs_seen = 0;
361 size_t fprs_seen = 0;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800362 if (!is_static && (gprs_seen < kNumQuickGprArgs)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800363 gprs_seen++;
364 offset += kBytesStackArgLocation;
Ian Rogers848871b2013-08-05 10:56:33 -0700365 }
Ian Rogers936b37f2014-02-14 00:52:24 -0800366 for (uint32_t i = 1; i < shorty_len; ++i) {
367 switch (shorty[i]) {
368 case 'Z':
369 case 'B':
370 case 'C':
371 case 'S':
372 case 'I':
373 case 'L':
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800374 if (gprs_seen < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800375 gprs_seen++;
376 offset += kBytesStackArgLocation;
377 }
378 break;
379 case 'J':
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800380 if (gprs_seen < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800381 gprs_seen++;
382 offset += 2 * kBytesStackArgLocation;
383 if (kBytesPerGprSpillLocation == 4) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800384 if (gprs_seen < kNumQuickGprArgs) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800385 gprs_seen++;
386 }
387 }
388 }
389 break;
390 case 'F':
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800391 if ((kNumQuickFprArgs != 0) && (fprs_seen + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800392 fprs_seen++;
393 offset += kBytesStackArgLocation;
394 }
395 break;
396 case 'D':
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800397 if ((kNumQuickFprArgs != 0) && (fprs_seen + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800398 fprs_seen++;
399 offset += 2 * kBytesStackArgLocation;
400 if (kBytesPerFprSpillLocation == 4) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800401 if ((kNumQuickFprArgs != 0) && (fprs_seen + 1 < kNumQuickFprArgs + 1)) {
Ian Rogers936b37f2014-02-14 00:52:24 -0800402 fprs_seen++;
403 }
404 }
405 }
406 break;
407 default:
408 LOG(FATAL) << "Unexpected shorty character: " << shorty[i] << " in " << shorty;
409 }
Ian Rogers848871b2013-08-05 10:56:33 -0700410 }
Ian Rogers936b37f2014-02-14 00:52:24 -0800411 return offset;
Ian Rogers848871b2013-08-05 10:56:33 -0700412 }
Ian Rogers848871b2013-08-05 10:56:33 -0700413 }
414
415 const bool is_static_;
416 const char* const shorty_;
417 const uint32_t shorty_len_;
Ian Rogers936b37f2014-02-14 00:52:24 -0800418 byte* const gpr_args_; // Address of GPR arguments in callee save frame.
419 byte* const fpr_args_; // Address of FPR arguments in callee save frame.
420 byte* const stack_args_; // Address of stack arguments in caller's frame.
421 uint32_t gpr_index_; // Index into spilled GPRs.
422 uint32_t fpr_index_; // Index into spilled FPRs.
423 uint32_t stack_index_; // Index into arguments on the stack.
424 // The current type of argument during VisitArguments.
425 Primitive::Type cur_type_;
Ian Rogers848871b2013-08-05 10:56:33 -0700426 // Does a 64bit parameter straddle the register and stack arguments?
427 bool is_split_long_or_double_;
428};
429
430// Visits arguments on the stack placing them into the shadow frame.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800431class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -0700432 public:
Ian Rogers936b37f2014-02-14 00:52:24 -0800433 BuildQuickShadowFrameVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
434 uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
Ian Rogers848871b2013-08-05 10:56:33 -0700435 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}
436
Ian Rogers9758f792014-03-13 09:02:55 -0700437 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
Ian Rogers848871b2013-08-05 10:56:33 -0700438
439 private:
Ian Rogers936b37f2014-02-14 00:52:24 -0800440 ShadowFrame* const sf_;
441 uint32_t cur_reg_;
Ian Rogers848871b2013-08-05 10:56:33 -0700442
Dragos Sbirleabd136a22013-08-13 18:07:04 -0700443 DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
Ian Rogers848871b2013-08-05 10:56:33 -0700444};
445
Ian Rogers9758f792014-03-13 09:02:55 -0700446void BuildQuickShadowFrameVisitor::Visit() {
447 Primitive::Type type = GetParamPrimitiveType();
448 switch (type) {
449 case Primitive::kPrimLong: // Fall-through.
450 case Primitive::kPrimDouble:
451 if (IsSplitLongOrDouble()) {
452 sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
453 } else {
454 sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
455 }
456 ++cur_reg_;
457 break;
458 case Primitive::kPrimNot: {
459 StackReference<mirror::Object>* stack_ref =
460 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
461 sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
462 }
463 break;
464 case Primitive::kPrimBoolean: // Fall-through.
465 case Primitive::kPrimByte: // Fall-through.
466 case Primitive::kPrimChar: // Fall-through.
467 case Primitive::kPrimShort: // Fall-through.
468 case Primitive::kPrimInt: // Fall-through.
469 case Primitive::kPrimFloat:
470 sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
471 break;
472 case Primitive::kPrimVoid:
473 LOG(FATAL) << "UNREACHABLE";
474 break;
475 }
476 ++cur_reg_;
477}
478
Brian Carlstromea46f952013-07-30 01:26:50 -0700479extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
480 mirror::ArtMethod** sp)
Ian Rogers848871b2013-08-05 10:56:33 -0700481 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
482 // Ensure we don't get thread suspension until the object arguments are safely in the shadow
483 // frame.
484 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
485
486 if (method->IsAbstract()) {
487 ThrowAbstractMethodError(method);
488 return 0;
489 } else {
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800490 DCHECK(!method->IsNative()) << PrettyMethod(method);
Ian Rogers848871b2013-08-05 10:56:33 -0700491 const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
492 MethodHelper mh(method);
493 const DexFile::CodeItem* code_item = mh.GetCodeItem();
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800494 DCHECK(code_item != nullptr) << PrettyMethod(method);
Ian Rogers848871b2013-08-05 10:56:33 -0700495 uint16_t num_regs = code_item->registers_size_;
496 void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
497 ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL, // No last shadow coming from quick.
498 method, 0, memory));
499 size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
Dragos Sbirleabd136a22013-08-13 18:07:04 -0700500 BuildQuickShadowFrameVisitor shadow_frame_builder(sp, mh.IsStatic(), mh.GetShorty(),
Ian Rogers936b37f2014-02-14 00:52:24 -0800501 mh.GetShortyLength(),
502 shadow_frame, first_arg_reg);
Ian Rogers848871b2013-08-05 10:56:33 -0700503 shadow_frame_builder.VisitArguments();
504 // Push a transition back into managed code onto the linked list in thread.
505 ManagedStack fragment;
506 self->PushManagedStackFragment(&fragment);
507 self->PushShadowFrame(shadow_frame);
508 self->EndAssertNoThreadSuspension(old_cause);
509
510 if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
511 // Ensure static method's class is initialized.
Mathieu Chartierc528dba2013-11-26 12:00:11 -0800512 SirtRef<mirror::Class> sirt_c(self, method->GetDeclaringClass());
513 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_c, true, true)) {
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800514 DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
Ian Rogers848871b2013-08-05 10:56:33 -0700515 self->PopManagedStackFragment(fragment);
516 return 0;
517 }
518 }
519
520 JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
521 // Pop transition.
522 self->PopManagedStackFragment(fragment);
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800523 // No need to restore the args since the method has already been run by the interpreter.
Ian Rogers848871b2013-08-05 10:56:33 -0700524 return result.GetJ();
525 }
526}
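// Note on first_arg_reg above (a worked example, not additional logic): dex "in" registers occupy
// the highest vregs of a frame, so a code item with registers_size_ == 5 and ins_size_ == 2 keeps
// its arguments in v3 and v4, giving first_arg_reg == 3.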
527
528// Visits arguments on the stack, placing them into the args vector; Object* arguments are converted
529// to jobjects.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800530class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -0700531 public:
Brian Carlstromea46f952013-07-30 01:26:50 -0700532 BuildQuickArgumentVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
Ian Rogers848871b2013-08-05 10:56:33 -0700533 uint32_t shorty_len, ScopedObjectAccessUnchecked* soa,
534 std::vector<jvalue>* args) :
535 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}
536
Ian Rogers9758f792014-03-13 09:02:55 -0700537 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
Ian Rogers848871b2013-08-05 10:56:33 -0700538
Ian Rogers9758f792014-03-13 09:02:55 -0700539 void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800540
Ian Rogers848871b2013-08-05 10:56:33 -0700541 private:
Ian Rogers9758f792014-03-13 09:02:55 -0700542 ScopedObjectAccessUnchecked* const soa_;
543 std::vector<jvalue>* const args_;
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800544 // References which we must update when exiting in case the GC moved the objects.
545 std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
Ian Rogers9758f792014-03-13 09:02:55 -0700546
Ian Rogers848871b2013-08-05 10:56:33 -0700547 DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
548};
549
Ian Rogers9758f792014-03-13 09:02:55 -0700550void BuildQuickArgumentVisitor::Visit() {
551 jvalue val;
552 Primitive::Type type = GetParamPrimitiveType();
553 switch (type) {
554 case Primitive::kPrimNot: {
555 StackReference<mirror::Object>* stack_ref =
556 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
557 val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
558 references_.push_back(std::make_pair(val.l, stack_ref));
559 break;
560 }
561 case Primitive::kPrimLong: // Fall-through.
562 case Primitive::kPrimDouble:
563 if (IsSplitLongOrDouble()) {
564 val.j = ReadSplitLongParam();
565 } else {
566 val.j = *reinterpret_cast<jlong*>(GetParamAddress());
567 }
568 break;
569 case Primitive::kPrimBoolean: // Fall-through.
570 case Primitive::kPrimByte: // Fall-through.
571 case Primitive::kPrimChar: // Fall-through.
572 case Primitive::kPrimShort: // Fall-through.
573 case Primitive::kPrimInt: // Fall-through.
574 case Primitive::kPrimFloat:
575 val.i = *reinterpret_cast<jint*>(GetParamAddress());
576 break;
577 case Primitive::kPrimVoid:
578 LOG(FATAL) << "UNREACHABLE";
579 val.j = 0;
580 break;
581 }
582 args_->push_back(val);
583}
584
585void BuildQuickArgumentVisitor::FixupReferences() {
586 // Fixup any references which may have changed.
587 for (const auto& pair : references_) {
588 pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
589 }
590}
591
Ian Rogers848871b2013-08-05 10:56:33 -0700592// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
593// which is responsible for recording callee save registers. We explicitly place into jobjects the
594 // incoming reference arguments (so they survive GC). We invoke the invocation handler (a field
595 // within the proxy object), which boxes the primitive arguments and deals with error cases.
Brian Carlstromea46f952013-07-30 01:26:50 -0700596extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
Ian Rogers848871b2013-08-05 10:56:33 -0700597 mirror::Object* receiver,
Brian Carlstromea46f952013-07-30 01:26:50 -0700598 Thread* self, mirror::ArtMethod** sp)
Ian Rogers848871b2013-08-05 10:56:33 -0700599 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Brian Carlstromd3633d52013-08-20 21:06:26 -0700600 DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
601 DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700602 // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
603 const char* old_cause =
604 self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
605 // Register the top of the managed stack, making the stack crawlable.
Brian Carlstromd3633d52013-08-20 21:06:26 -0700606 DCHECK_EQ(*sp, proxy_method) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700607 self->SetTopOfStack(sp, 0);
608 DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
Brian Carlstromd3633d52013-08-20 21:06:26 -0700609 Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
610 << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700611 self->VerifyStack();
612 // Start new JNI local reference state.
613 JNIEnvExt* env = self->GetJniEnv();
614 ScopedObjectAccessUnchecked soa(env);
615 ScopedJniEnvLocalRefState env_state(env);
616 // Create local ref. copies of proxy method and the receiver.
617 jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);
618
619 // Place the arguments into the args vector and remove the receiver.
620 MethodHelper proxy_mh(proxy_method);
Brian Carlstromd3633d52013-08-20 21:06:26 -0700621 DCHECK(!proxy_mh.IsStatic()) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700622 std::vector<jvalue> args;
623 BuildQuickArgumentVisitor local_ref_visitor(sp, proxy_mh.IsStatic(), proxy_mh.GetShorty(),
624 proxy_mh.GetShortyLength(), &soa, &args);
Brian Carlstromd3633d52013-08-20 21:06:26 -0700625
Ian Rogers848871b2013-08-05 10:56:33 -0700626 local_ref_visitor.VisitArguments();
Brian Carlstromd3633d52013-08-20 21:06:26 -0700627 DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700628 args.erase(args.begin());
629
630 // Convert proxy method into expected interface method.
Brian Carlstromea46f952013-07-30 01:26:50 -0700631 mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
Brian Carlstromd3633d52013-08-20 21:06:26 -0700632 DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700633 DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
634 jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);
635
636 // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
637 // that performs allocations.
638 self->EndAssertNoThreadSuspension(old_cause);
639 JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
640 rcvr_jobj, interface_method_jobj, args);
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800641 // Restore references which might have moved.
642 local_ref_visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -0700643 return result.GetJ();
644}
645
646 // Read object references held in arguments from quick frames and place them in JNI local references,
647// so they don't get garbage collected.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800648class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -0700649 public:
Mathieu Chartier590fee92013-09-13 13:46:47 -0700650 RememberForGcArgumentVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
651 uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
Ian Rogers848871b2013-08-05 10:56:33 -0700652 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}
653
Ian Rogers9758f792014-03-13 09:02:55 -0700654 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
Mathieu Chartier07d447b2013-09-26 11:57:43 -0700655
Ian Rogers9758f792014-03-13 09:02:55 -0700656 void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers848871b2013-08-05 10:56:33 -0700657
658 private:
Ian Rogers9758f792014-03-13 09:02:55 -0700659 ScopedObjectAccessUnchecked* const soa_;
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800660 // References which we must update when exiting in case the GC moved the objects.
661 std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
Mathieu Chartier590fee92013-09-13 13:46:47 -0700662 DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
Ian Rogers848871b2013-08-05 10:56:33 -0700663};
664
Ian Rogers9758f792014-03-13 09:02:55 -0700665void RememberForGcArgumentVisitor::Visit() {
666 if (IsParamAReference()) {
667 StackReference<mirror::Object>* stack_ref =
668 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
669 jobject reference =
670 soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
671 references_.push_back(std::make_pair(reference, stack_ref));
672 }
673}
674
675void RememberForGcArgumentVisitor::FixupReferences() {
676 // Fixup any references which may have changed.
677 for (const auto& pair : references_) {
678 pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
679 }
680}
681
682
Ian Rogers848871b2013-08-05 10:56:33 -0700683// Lazily resolve a method for quick. Called by stub code.
Brian Carlstromea46f952013-07-30 01:26:50 -0700684extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
Ian Rogers848871b2013-08-05 10:56:33 -0700685 mirror::Object* receiver,
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800686 Thread* self, mirror::ArtMethod** sp)
Ian Rogers848871b2013-08-05 10:56:33 -0700687 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800688 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
Ian Rogers848871b2013-08-05 10:56:33 -0700689 // Start new JNI local reference state
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800690 JNIEnvExt* env = self->GetJniEnv();
Ian Rogers848871b2013-08-05 10:56:33 -0700691 ScopedObjectAccessUnchecked soa(env);
692 ScopedJniEnvLocalRefState env_state(env);
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800693 const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");
Ian Rogers848871b2013-08-05 10:56:33 -0700694
695 // Compute details about the called method (avoid GCs)
696 ClassLinker* linker = Runtime::Current()->GetClassLinker();
Brian Carlstromea46f952013-07-30 01:26:50 -0700697 mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
Ian Rogers848871b2013-08-05 10:56:33 -0700698 InvokeType invoke_type;
699 const DexFile* dex_file;
700 uint32_t dex_method_idx;
701 if (called->IsRuntimeMethod()) {
702 uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
703 const DexFile::CodeItem* code;
704 {
705 MethodHelper mh(caller);
706 dex_file = &mh.GetDexFile();
707 code = mh.GetCodeItem();
708 }
709 CHECK_LT(dex_pc, code->insns_size_in_code_units_);
710 const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
711 Instruction::Code instr_code = instr->Opcode();
712 bool is_range;
713 switch (instr_code) {
714 case Instruction::INVOKE_DIRECT:
715 invoke_type = kDirect;
716 is_range = false;
717 break;
718 case Instruction::INVOKE_DIRECT_RANGE:
719 invoke_type = kDirect;
720 is_range = true;
721 break;
722 case Instruction::INVOKE_STATIC:
723 invoke_type = kStatic;
724 is_range = false;
725 break;
726 case Instruction::INVOKE_STATIC_RANGE:
727 invoke_type = kStatic;
728 is_range = true;
729 break;
730 case Instruction::INVOKE_SUPER:
731 invoke_type = kSuper;
732 is_range = false;
733 break;
734 case Instruction::INVOKE_SUPER_RANGE:
735 invoke_type = kSuper;
736 is_range = true;
737 break;
738 case Instruction::INVOKE_VIRTUAL:
739 invoke_type = kVirtual;
740 is_range = false;
741 break;
742 case Instruction::INVOKE_VIRTUAL_RANGE:
743 invoke_type = kVirtual;
744 is_range = true;
745 break;
746 case Instruction::INVOKE_INTERFACE:
747 invoke_type = kInterface;
748 is_range = false;
749 break;
750 case Instruction::INVOKE_INTERFACE_RANGE:
751 invoke_type = kInterface;
752 is_range = true;
753 break;
754 default:
755 LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
756 // Avoid "used uninitialized" warnings.
757 invoke_type = kDirect;
758 is_range = false;
759 }
760 dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
761
762 } else {
763 invoke_type = kStatic;
764 dex_file = &MethodHelper(called).GetDexFile();
765 dex_method_idx = called->GetDexMethodIndex();
766 }
767 uint32_t shorty_len;
768 const char* shorty =
769 dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
Mathieu Chartier590fee92013-09-13 13:46:47 -0700770 RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
Ian Rogers848871b2013-08-05 10:56:33 -0700771 visitor.VisitArguments();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800772 self->EndAssertNoThreadSuspension(old_cause);
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800773 bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
Ian Rogers848871b2013-08-05 10:56:33 -0700774 // Resolve method filling in dex cache.
775 if (called->IsRuntimeMethod()) {
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800776 SirtRef<mirror::Object> sirt_receiver(soa.Self(), virtual_or_interface ? receiver : nullptr);
Ian Rogers848871b2013-08-05 10:56:33 -0700777 called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800778 receiver = sirt_receiver.get();
Ian Rogers848871b2013-08-05 10:56:33 -0700779 }
780 const void* code = NULL;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800781 if (LIKELY(!self->IsExceptionPending())) {
Ian Rogers848871b2013-08-05 10:56:33 -0700782 // Incompatible class change should have been handled in resolve method.
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800783 CHECK(!called->CheckIncompatibleClassChange(invoke_type))
784 << PrettyMethod(called) << " " << invoke_type;
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800785 if (virtual_or_interface) {
786 // Refine called method based on receiver.
787 CHECK(receiver != nullptr) << invoke_type;
788 if (invoke_type == kVirtual) {
789 called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
790 } else {
791 called = receiver->GetClass()->FindVirtualMethodForInterface(called);
792 }
Ian Rogers83883d72013-10-21 21:07:24 -0700793 // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
794 // of the sharpened method.
795 if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100796 caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
Ian Rogers83883d72013-10-21 21:07:24 -0700797 } else {
798 // Calling from one dex file to another, need to compute the method index appropriate to
Vladimir Markobbcc0c02014-02-03 14:08:42 +0000799 // the caller's dex file. Since we get here only if the original called was a runtime
800 // method, we've got the correct dex_file and a dex_method_idx from above.
801 DCHECK(&MethodHelper(caller).GetDexFile() == dex_file);
Ian Rogers83883d72013-10-21 21:07:24 -0700802 uint32_t method_index =
Vladimir Markobbcc0c02014-02-03 14:08:42 +0000803 MethodHelper(called).FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
Ian Rogers83883d72013-10-21 21:07:24 -0700804 if (method_index != DexFile::kDexNoIndex) {
Sebastien Hertzd2fe10a2014-01-15 10:20:56 +0100805 caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
Ian Rogers83883d72013-10-21 21:07:24 -0700806 }
807 }
808 }
Ian Rogers848871b2013-08-05 10:56:33 -0700809 // Ensure that the called method's class is initialized.
Mathieu Chartierc528dba2013-11-26 12:00:11 -0800810 SirtRef<mirror::Class> called_class(soa.Self(), called->GetDeclaringClass());
Ian Rogers848871b2013-08-05 10:56:33 -0700811 linker->EnsureInitialized(called_class, true, true);
812 if (LIKELY(called_class->IsInitialized())) {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800813 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -0700814 } else if (called_class->IsInitializing()) {
815 if (invoke_type == kStatic) {
816 // Class is still initializing, go to oat and grab code (trampoline must be left in place
817 // until class is initialized to stop races between threads).
Ian Rogersef7d42f2014-01-06 12:55:46 -0800818 code = linker->GetQuickOatCodeFor(called);
Ian Rogers848871b2013-08-05 10:56:33 -0700819 } else {
820 // No trampoline for non-static methods.
Ian Rogersef7d42f2014-01-06 12:55:46 -0800821 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -0700822 }
823 } else {
824 DCHECK(called_class->IsErroneous());
825 }
826 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800827 CHECK_EQ(code == NULL, self->IsExceptionPending());
Mathieu Chartier07d447b2013-09-26 11:57:43 -0700828 // Fix up any locally saved objects that may have moved during a GC.
829 visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -0700830 // Place called method in callee-save frame to be placed as first argument to quick method.
831 *sp = called;
832 return code;
833}
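// Decoding example for the invoke switch above (hypothetical bytecode): an
// "invoke-virtual {v1, v2}, method@0x0042" yields invoke_type == kVirtual and is_range == false,
// and the method index 0x42 is then read via instr->VRegB_35c(); the _RANGE forms use
// instr->VRegB_3rc() instead.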
834
Andreas Gampec147b002014-03-06 18:11:06 -0800835
836
837/*
838 * This class uses a couple of observations to unite the different calling conventions through
839 * a few constants.
840 *
841 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
842 * possible alignment.
843 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
844 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
845 * when we have to split things
846 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
847 * and we can use Int handling directly.
848 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
849 * necessary when widening. Also, widening of Ints will take place implicitly, and the
850 * extension should be compatible with Aarch64, which mandates copying the available bits
851 * into LSB and leaving the rest unspecified.
852 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
853 * the stack.
854 * 6) Only little-endian byte order is assumed.
855 *
856 *
857 * Actual work is supposed to be done in a delegate of the template type. The interface is as
858 * follows:
859 *
860 * void PushGpr(uintptr_t): Add a value for the next GPR
861 *
862 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
863 * padding, that is, when the architecture is 32b and aligns 64b values.
864 *
865 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
866 * split this if necessary. Any required alignment will already have been
867 * applied by the state machine.
868 *
869 * void PushStack(uintptr_t): Push a value to the stack.
870 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -0700871 * uintptr_t PushSirt(mirror::Object* ref): Add a reference to the Sirt. The ref argument _will_
 872 * sometimes be nullptr, which matters for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -0800873 * Must return the jobject, that is, the reference to the
Andreas Gampe36fea8d2014-03-10 13:37:40 -0700874 * entry in the Sirt (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -0800875 *
876 */
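// A minimal sketch of a delegate satisfying the interface described above (names here are
// illustrative only; ComputeGenericJniFrameSize below is the real counting delegate):
//
//   class CountingDelegate {
//    public:
//     void PushGpr(uintptr_t) { gprs_++; }
//     void PushFpr4(float) { fprs_++; }
//     void PushFpr8(uint64_t) { fprs_++; }
//     void PushStack(uintptr_t) { stack_++; }
//     uintptr_t PushSirt(mirror::Object*) { sirt_refs_++; return 0u; }
//     size_t gprs_ = 0, fprs_ = 0, stack_ = 0, sirt_refs_ = 0;
//   };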
877template <class T> class BuildGenericJniFrameStateMachine {
878 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800879#if defined(__arm__)
880 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -0800881 static constexpr bool kNativeSoftFloatAbi = true;
882 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800883 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
884
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800885 static constexpr size_t kRegistersNeededForLong = 2;
886 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -0800887 static constexpr bool kMultiRegistersAligned = true;
888 static constexpr bool kMultiRegistersWidened = false;
889 static constexpr bool kAlignLongOnStack = true;
890 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800891#elif defined(__mips__)
892 // TODO: These are all dummy values!
893 static constexpr bool kNativeSoftFloatAbi = true; // This is a soft float ABI.
894 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs (dummy value).
895 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs (dummy value).
896
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800897 static constexpr size_t kRegistersNeededForLong = 2;
898 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -0800899 static constexpr bool kMultiRegistersAligned = true;
900 static constexpr bool kMultiRegistersWidened = true;
901 static constexpr bool kAlignLongOnStack = false;
902 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800903#elif defined(__i386__)
904 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -0800905 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800906 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs (all on the stack).
 907 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs (all on the stack).
908
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800909 static constexpr size_t kRegistersNeededForLong = 2;
910 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -0800911 static constexpr bool kMultiRegistersAligned = false; // x86 not using regs, anyways
912 static constexpr bool kMultiRegistersWidened = false;
913 static constexpr bool kAlignLongOnStack = false;
914 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800915#elif defined(__x86_64__)
916 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
917 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
918 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
919
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800920 static constexpr size_t kRegistersNeededForLong = 1;
921 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -0800922 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe7a0e5042014-03-07 13:03:19 -0800923 static constexpr bool kMultiRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -0800924 static constexpr bool kAlignLongOnStack = false;
925 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800926#else
927#error "Unsupported architecture"
928#endif
929
Andreas Gampec147b002014-03-06 18:11:06 -0800930 public:
931 explicit BuildGenericJniFrameStateMachine(T* delegate) : gpr_index_(kNumNativeGprArgs),
932 fpr_index_(kNumNativeFprArgs),
933 stack_entries_(0),
934 delegate_(delegate) {
935 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
936 // the next register is even; counting down is just to make the compiler happy...
937 CHECK_EQ(kNumNativeGprArgs % 2, 0U);
938 CHECK_EQ(kNumNativeFprArgs % 2, 0U);
939 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800940
Andreas Gampec147b002014-03-06 18:11:06 -0800941 virtual ~BuildGenericJniFrameStateMachine() {}
942
943 bool HavePointerGpr() {
944 return gpr_index_ > 0;
945 }
946
947 void AdvancePointer(void* val) {
948 if (HavePointerGpr()) {
949 gpr_index_--;
950 PushGpr(reinterpret_cast<uintptr_t>(val));
951 } else {
952 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
953 PushStack(reinterpret_cast<uintptr_t>(val));
954 gpr_index_ = 0;
955 }
956 }
957
958
959 bool HaveSirtGpr() {
960 return gpr_index_ > 0;
961 }
962
963 void AdvanceSirt(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampe36fea8d2014-03-10 13:37:40 -0700964 uintptr_t sirtRef = PushSirt(ptr);
Andreas Gampec147b002014-03-06 18:11:06 -0800965 if (HaveSirtGpr()) {
966 gpr_index_--;
967 PushGpr(sirtRef);
968 } else {
969 stack_entries_++;
970 PushStack(sirtRef);
971 gpr_index_ = 0;
972 }
973 }
974
975
976 bool HaveIntGpr() {
977 return gpr_index_ > 0;
978 }
979
980 void AdvanceInt(uint32_t val) {
981 if (HaveIntGpr()) {
982 gpr_index_--;
983 PushGpr(val);
984 } else {
985 stack_entries_++;
986 PushStack(val);
987 gpr_index_ = 0;
988 }
989 }
990
991
992 bool HaveLongGpr() {
993 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
994 }
995
996 bool LongGprNeedsPadding() {
997 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
998 kAlignLongOnStack && // and when it needs alignment
999 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1000 }
1001
1002 bool LongStackNeedsPadding() {
1003 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1004 kAlignLongOnStack && // and when it needs 8B alignment
1005 (stack_entries_ & 1) == 1; // counter is odd
1006 }
1007
1008 void AdvanceLong(uint64_t val) {
1009 if (HaveLongGpr()) {
1010 if (LongGprNeedsPadding()) {
1011 PushGpr(0);
1012 gpr_index_--;
1013 }
1014 if (kRegistersNeededForLong == 1) {
1015 PushGpr(static_cast<uintptr_t>(val));
1016 } else {
1017 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1018 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1019 }
1020 gpr_index_ -= kRegistersNeededForLong;
1021 } else {
1022 if (LongStackNeedsPadding()) {
1023 PushStack(0);
1024 stack_entries_++;
1025 }
1026 if (kRegistersNeededForLong == 1) {
1027 PushStack(static_cast<uintptr_t>(val));
1028 stack_entries_++;
1029 } else {
1030 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1031 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1032 stack_entries_ += 2;
1033 }
1034 gpr_index_ = 0;
1035 }
1036 }
1037
1038
1039 bool HaveFloatFpr() {
1040 return fpr_index_ > 0;
1041 }
1042
Andreas Gampec147b002014-03-06 18:11:06 -08001043 template <typename U, typename V> V convert(U in) {
1044 CHECK_LE(sizeof(U), sizeof(V));
1045 union { U u; V v; } tmp;
1046 tmp.u = in;
1047 return tmp.v;
1048 }
1049
1050 void AdvanceFloat(float val) {
1051 if (kNativeSoftFloatAbi) {
1052 AdvanceInt(convert<float, uint32_t>(val));
1053 } else {
1054 if (HaveFloatFpr()) {
1055 fpr_index_--;
1056 if (kRegistersNeededForDouble == 1) {
1057 if (kMultiRegistersWidened) {
1058 PushFpr8(convert<double, uint64_t>(val));
1059 } else {
1060 // No widening, just use the bits.
1061 PushFpr8(convert<float, uint64_t>(val));
1062 }
1063 } else {
1064 PushFpr4(val);
1065 }
1066 } else {
1067 stack_entries_++;
1068 if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
1069 // Need to widen before storing: Note the "double" in the template instantiation.
1070 PushStack(convert<double, uintptr_t>(val));
1071 } else {
1072 PushStack(convert<float, uintptr_t>(val));
1073 }
1074 fpr_index_ = 0;
1075 }
1076 }
1077 }
1078
1079
1080 bool HaveDoubleFpr() {
1081 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1082 }
1083
1084 bool DoubleFprNeedsPadding() {
1085 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1086 kAlignDoubleOnStack && // and when it needs alignment
1087 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1088 }
1089
1090 bool DoubleStackNeedsPadding() {
1091 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1092 kAlignDoubleOnStack && // and when it needs 8B alignment
1093 (stack_entries_ & 1) == 1; // counter is odd
1094 }
1095
1096 void AdvanceDouble(uint64_t val) {
1097 if (kNativeSoftFloatAbi) {
1098 AdvanceLong(val);
1099 } else {
1100 if (HaveDoubleFpr()) {
1101 if (DoubleFprNeedsPadding()) {
1102 PushFpr4(0);
1103 fpr_index_--;
1104 }
1105 PushFpr8(val);
1106 fpr_index_ -= kRegistersNeededForDouble;
1107 } else {
1108 if (DoubleStackNeedsPadding()) {
1109 PushStack(0);
1110 stack_entries_++;
1111 }
1112 if (kRegistersNeededForDouble == 1) {
1113 PushStack(static_cast<uintptr_t>(val));
1114 stack_entries_++;
1115 } else {
1116 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1117 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1118 stack_entries_ += 2;
1119 }
1120 fpr_index_ = 0;
1121 }
1122 }
1123 }
1124
1125 uint32_t getStackEntries() {
1126 return stack_entries_;
1127 }
1128
1129 uint32_t getNumberOfUsedGprs() {
1130 return kNumNativeGprArgs - gpr_index_;
1131 }
1132
1133 uint32_t getNumberOfUsedFprs() {
1134 return kNumNativeFprArgs - fpr_index_;
1135 }
1136
1137 private:
1138 void PushGpr(uintptr_t val) {
1139 delegate_->PushGpr(val);
1140 }
1141 void PushFpr4(float val) {
1142 delegate_->PushFpr4(val);
1143 }
1144 void PushFpr8(uint64_t val) {
1145 delegate_->PushFpr8(val);
1146 }
1147 void PushStack(uintptr_t val) {
1148 delegate_->PushStack(val);
1149 }
1150 uintptr_t PushSirt(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1151 return delegate_->PushSirt(ref);
1152 }
1153
1154 uint32_t gpr_index_; // Number of free GPRs
1155 uint32_t fpr_index_; // Number of free FPRs
1156 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1157 // extended
1158 T* delegate_; // What Push implementation gets called
1159};
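// Alignment example for the state machine above, using the 32-bit ARM values in this file
// (illustration only): gpr_index_ starts at 4; after one int argument it is 3 (odd), so a
// following long triggers LongGprNeedsPadding(), burns one register with PushGpr(0) and then
// occupies an aligned even/odd register pair, matching the AAPCS rule for 64-bit values.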
1160
1161class ComputeGenericJniFrameSize FINAL {
1162 public:
1163 ComputeGenericJniFrameSize() : num_sirt_references_(0), num_stack_entries_(0) {}
1164
Andreas Gampec147b002014-03-06 18:11:06 -08001165 uint32_t GetStackSize() {
1166 return num_stack_entries_ * sizeof(uintptr_t);
1167 }
1168
  // WARNING: After this, *sp won't be pointing to the method anymore!
  void ComputeLayout(mirror::ArtMethod*** m, bool is_static, const char* shorty, uint32_t shorty_len,
                     void* sp, StackIndirectReferenceTable** table, uint32_t* sirt_entries,
                     uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr,
                     void** code_return, size_t* overall_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ComputeAll(is_static, shorty, shorty_len);

    mirror::ArtMethod* method = **m;

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the Sirt, and relocate the method pointer.

    // "Free" the slot for the method.
    sp8 += kPointerSize;

    // Add the Sirt.
    *sirt_entries = num_sirt_references_;
    size_t sirt_size = StackIndirectReferenceTable::GetAlignedSirtSize(num_sirt_references_);
    sp8 -= sirt_size;
    *table = reinterpret_cast<StackIndirectReferenceTable*>(sp8);
    (*table)->SetNumberOfReferences(num_sirt_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    sp8 -= kPointerSize;
    uint8_t* method_pointer = sp8;
    *(reinterpret_cast<mirror::ArtMethod**>(method_pointer)) = method;
    *m = reinterpret_cast<mirror::ArtMethod**>(method_pointer);

    // Reference cookie and padding.
    sp8 -= 8;
    // Store the Sirt size.
    *reinterpret_cast<uint32_t*>(sp8) = static_cast<uint32_t>(sirt_size & 0xFFFFFFFF);

    // Next comes the native call stack.
    sp8 -= GetStackSize();
    // Now align the call stack below. This aligns by 16, as AArch64 seems to require.
    uintptr_t mask = ~0x0F;
    sp8 = reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(sp8) & mask);
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // Put FPRs and GPRs below. Assuming uintptr_t-sized FPR slots is OK right now, as we have
    // soft-float ARM.
    size_t fregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);

    // Reserve space for the code pointer.
    sp8 -= kPointerSize;
    *code_return = reinterpret_cast<void*>(sp8);

    *overall_size = reinterpret_cast<uint8_t*>(sp) - sp8;

    // The new SP is stored at the end of the alloca, so it can be immediately popped.
    sp8 = reinterpret_cast<uint8_t*>(sp) - 5 * KB;
    *(reinterpret_cast<uint8_t**>(sp8)) = method_pointer;
  }

  void ComputeSirtOffset() { }  // Nothing to do; the offset is static right now.

  void ComputeAll(bool is_static, const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize> sm(this);

    // JNIEnv.
    sm.AdvancePointer(nullptr);

    // Class object or this as the first Java argument (a placeholder value; we only count here).
    sm.AdvanceSirt(reinterpret_cast<mirror::Object*>(0x12345678));

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          sm.AdvanceSirt(reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
      }
    }

    num_stack_entries_ = sm.getStackEntries();
  }

  void PushGpr(uintptr_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr4(float /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr8(uint64_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushStack(uintptr_t /* val */) {
    // Counting is already done in the state machine.
  }

  uintptr_t PushSirt(mirror::Object* /* ptr */) {
    num_sirt_references_++;
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 private:
  uint32_t num_sirt_references_;
  uint32_t num_stack_entries_;
};

// Visits arguments on the stack, placing them into a region lower down the stack for the
// benefit of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(mirror::ArtMethod*** sp, bool is_static, const char* shorty,
                              uint32_t shorty_len, Thread* self) :
      QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), sm_(this) {
    ComputeGenericJniFrameSize fsc;
    fsc.ComputeLayout(sp, is_static, shorty, shorty_len, *sp, &sirt_, &sirt_expected_refs_,
                      &cur_stack_arg_, &cur_gpr_reg_, &cur_fpr_reg_, &code_return_,
                      &alloca_used_size_);
    sirt_number_of_references_ = 0;
    cur_sirt_entry_ = reinterpret_cast<StackReference<mirror::Object>*>(GetFirstSirtEntry());

    // The JNI environment is always the first argument.
    sm_.AdvancePointer(self->GetJniEnv());

    if (is_static) {
      // Static methods get the declaring class as the second argument.
      sm_.AdvanceSirt((**sp)->GetDeclaringClass());
    }
  }

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeSirt(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  jobject GetFirstSirtEntry() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return reinterpret_cast<jobject>(sirt_->GetStackReference(0));
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    *cur_fpr_reg_ = val;
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    // A 64-bit value occupies two consecutive 32-bit FPR slots.
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

  uintptr_t PushSirt(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t tmp;
    if (ref == nullptr) {
      // null is passed to native code as a plain null pointer; a Sirt slot is still consumed.
      *cur_sirt_entry_ = StackReference<mirror::Object>();
      tmp = reinterpret_cast<uintptr_t>(nullptr);
    } else {
      *cur_sirt_entry_ = StackReference<mirror::Object>::FromMirrorPtr(ref);
      tmp = reinterpret_cast<uintptr_t>(cur_sirt_entry_);
    }
    cur_sirt_entry_++;
    sirt_number_of_references_++;
    return tmp;
  }

  // Size of the part of the alloca that we actually need.
  size_t GetAllocaUsedSize() {
    return alloca_used_size_;
  }

  void* GetCodeReturn() {
    return code_return_;
  }

 private:
  uint32_t sirt_number_of_references_;
  StackReference<mirror::Object>* cur_sirt_entry_;
  StackIndirectReferenceTable* sirt_;
  uint32_t sirt_expected_refs_;
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
  void* code_return_;
  size_t alloca_used_size_;

  BuildGenericJniFrameStateMachine<BuildGenericJniFrameVisitor> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

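// Visits a single argument. A long or double may be split between the last argument register
// and the first stack slot (IsSplitLongOrDouble), in which case it is reassembled via
// ReadSplitLongParam. References are copied into the Sirt, and the address of the Sirt entry
// is what gets handed to native code.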
void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read into a uint64_t, so that we don't accidentally cast the bits to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceSirt(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
}

void BuildGenericJniFrameVisitor::FinalizeSirt(Thread* self) {
  // Initialize padding entries.
  while (sirt_number_of_references_ < sirt_expected_refs_) {
    *cur_sirt_entry_ = StackReference<mirror::Object>();
    cur_sirt_entry_++;
    sirt_number_of_references_++;
  }
  sirt_->SetNumberOfReferences(sirt_expected_refs_);
  DCHECK_NE(sirt_expected_refs_, 0U);
  // Install the Sirt on the thread, so that the references it holds are visible to the GC.
  self->PushSirt(sirt_);
}

extern "C" void* artFindNativeMethod();

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a Sirt and a call stack, and fill a mini-stack with values to be pushed to registers.
 * The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this up, as the Sirt needs to go into the callee-save frame.
 *
 * The return value of this function denotes:
 * 1) How many bytes of the alloca can be released, if the value is non-negative.
 * 2) An error, if the value is negative.
 */
extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* called = *sp;
  DCHECK(called->IsNative()) << PrettyMethod(called, true);

  // Run the visitor.
  MethodHelper mh(called);

  BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), mh.GetShorty(), mh.GetShortyLength(),
                                      self);
  visitor.VisitArguments();
  visitor.FinalizeSirt(self);

  // Fix up managed-stack things in Thread.
  self->SetTopOfStack(sp, 0);

  self->VerifyStack();

  // Start JNI, save the cookie.
  uint32_t cookie;
  if (called->IsSynchronized()) {
    cookie = JniMethodStartSynchronized(visitor.GetFirstSirtEntry(), self);
    if (self->IsExceptionPending()) {
      self->PopSirt();
      // A negative value denotes an error.
      return -1;
    }
  } else {
    cookie = JniMethodStart(self);
  }
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  *(sp32 - 1) = cookie;

  // Retrieve the stored native code.
  const void* nativeCode = called->GetNativeMethod();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
    nativeCode = artFindNativeMethod();

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.
      return -1;
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

  // Store the native code pointer in the stack at the right location.
  uintptr_t* code_pointer = reinterpret_cast<uintptr_t*>(visitor.GetCodeReturn());
  *code_pointer = reinterpret_cast<uintptr_t>(nativeCode);

  // 5K reserved, window_size + frame pointer used.
  size_t window_size = visitor.GetAllocaUsedSize();
  return (5 * KB) - window_size - kPointerSize;
}
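
// A sketch (not the actual stub, which lives in architecture-specific assembly) of how a
// caller is expected to use the value returned above, per the contract documented before the
// function:
//
//   ssize_t released = artQuickGenericJniTrampoline(self, sp);
//   if (released < 0) {
//     //  An exception is pending; deliver it instead of calling native code.
//   } else {
//     //  Release 'released' bytes of the alloca, pop the GPR/FPR mini-stacks into argument
//     //  registers, and call the native code pointer stored at the end of the region.
//   }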

/*
 * Called after the native JNI code returns. Responsible for cleanup (SIRT, saved state) and
 * unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, mirror::ArtMethod** sp,
                                                    jvalue result, uint64_t result_f)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  mirror::ArtMethod* called = *sp;
  uint32_t cookie = *(sp32 - 1);

  MethodHelper mh(called);
  char return_shorty_char = mh.GetShorty()[0];

  if (return_shorty_char == 'L') {
    // A reference is the only return type that needs a special ending call.
    if (called->IsSynchronized()) {
      StackIndirectReferenceTable* table =
          reinterpret_cast<StackIndirectReferenceTable*>(
              reinterpret_cast<uint8_t*>(sp) + kPointerSize);
      jobject tmp = reinterpret_cast<jobject>(table->GetStackReference(0));

      return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(result.l, cookie, tmp,
                                                                              self));
    } else {
      return reinterpret_cast<uint64_t>(JniMethodEndWithReference(result.l, cookie, self));
    }
  } else {
    if (called->IsSynchronized()) {
      StackIndirectReferenceTable* table =
          reinterpret_cast<StackIndirectReferenceTable*>(
              reinterpret_cast<uint8_t*>(sp) + kPointerSize);
      jobject tmp = reinterpret_cast<jobject>(table->GetStackReference(0));

      JniMethodEndSynchronized(cookie, tmp, self);
    } else {
      JniMethodEnd(cookie, self);
    }

    switch (return_shorty_char) {
      case 'F':  // Fall-through: floats and doubles come back as raw bits in result_f.
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

}  // namespace art