/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "method_reference.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "debugger.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kAlignPairRegister = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSoftFloatAbi = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = kArm32QuickCodeUseSoftFloat ? 0 : 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F15        |
  // | F14        |    f_arg1
  // | F13        |
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 2 arguments passed in FPRs. Floats can be passed
                                                 // only in even numbered registers and each double
                                                 // occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 32;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 76;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |    f_arg7
  // | F18        |    f_arg6
  // | F17        |    f_arg5
  // | F16        |    f_arg4
  // | F15        |    f_arg3
  // | F14        |    f_arg2
  // | F13        |    f_arg1
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for Mips64, when A0 is skipped, F0 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
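  // Note: per the frame layout sketched above, the argument GPRs are not spilled in argument
  // order on x86-64 (callee saves such as RBX/RBP sit between them), so the mapping from
  // argument index to spill slot is spelled out explicitly below.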
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static mirror::Object* GetProxyThisObject(ArtMethod** sp)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, (*sp)->GetFrameSizeInBytes());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr();
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, Runtime::kRefsAndArgs);
  }

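  // Returns the compiled method that physically contains the call site (the outermost frame);
  // inlining at the call site is not resolved here. See GetCallingDexPc() below, which is
  // inline-aware.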
  static ArtMethod* GetOuterMethod(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

  static uint32_t GetCallingDexPc(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    ArtMethod* outer_method = *caller_sp;
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    uintptr_t outer_pc_offset = outer_method->NativeQuickPcOffset(outer_pc);

    if (outer_method->IsOptimized(sizeof(void*))) {
      CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
      StackMapEncoding encoding = code_info.ExtractEncoding();
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset, encoding);
      DCHECK(stack_map.IsValid());
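      // If the call site was inlined, report the dex pc of the innermost inlined frame, since
      // that is the frame that actually issued the call.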
      if (stack_map.HasInlineInfo(encoding)) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
        return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
      } else {
        return stack_map.GetDexPc(encoding);
      }
    } else {
      return outer_method->ToDexPc(outer_pc);
    }
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_REQUIRES(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want to assume that counters (fpr_double_index_) are even if the
    // next register is even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }

  void VisitArguments() SHARED_REQUIRES(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the method's first argument.
    // (b) Whatever the argument type, 'stack_index_' should be advanced for every argument
    //     visited.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong && kAlignPairRegister && gpr_index_ == 0) {
              // Currently, this is only for ARM and MIPS, where the first available parameter
              // register is R1 (on ARM) or A1 (on MIPS). So we skip it, and use R2 (on ARM) or
              // A2 (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_+= 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_+= 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;  // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;  // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed
  // in terms of singles, may be behind fpr_index.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// allows using the QuickArgumentVisitor constants without moving all the code into its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObject(sp);
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
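      // A long/double occupies two vregs: this increment plus the shared one at the end of
      // Visit() advances cur_reg_ by two in total.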
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    // No last shadow coming from quick.
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
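    // Per the dex calling convention, incoming arguments occupy the last ins_size_ registers of
    // the frame, so the first argument register is registers_size_ - ins_size_.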
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    auto* non_proxy_method = method->GetInterfaceMethodIfProxy(sizeof(void*));
    const char* shorty = non_proxy_method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(shadow_frame->GetMethod());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }
    JValue result = interpreter::EnterInterpreterFromEntryPoint(self, code_item, shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Request a stack deoptimization if needed.
    ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
    if (UNLIKELY(Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
      // Push the context of the deoptimization stack so we can restore the return value and the
      // exception before executing the deoptimized frames.
      self->PushDeoptimizationContext(result, shorty[0] == 'L', self->GetException());

      // Set special exception to cause deoptimization.
      self->SetException(Thread::GetDeoptimizationException());
    }

    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(
    ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ((*sp), proxy_method) << PrettyMethod(proxy_method);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(sizeof(void*));
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  ArtMethod* interface_method = proxy_method->FindOverriddenMethod(sizeof(void*));
  DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  self->EndAssertNoThreadSuspension(old_cause);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(
      mirror::Method::CreateFromArtMethod(soa.Self(), interface_method));

813 // All naked Object*s should now be in jobjects, so its safe to go into the main invoke code
  // that performs allocations.
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(
    ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // The resolution trampoline stashes the resolved method into the callee-save frame to transport
  // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
  // does not have the same stack layout as the callee-save method).
  ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  InvokeType invoke_type;
  MethodReference called_method(nullptr, 0);
  const bool called_method_known_on_entry = !called->IsRuntimeMethod();
  ArtMethod* caller = nullptr;
  if (!called_method_known_on_entry) {
    caller = QuickArgumentVisitor::GetCallingMethod(sp);
    uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
    const DexFile::CodeItem* code;
    called_method.dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
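    // Decode the invoke instruction at the caller's dex pc to recover the invoke type and the
    // index of the method being called.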
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
        UNREACHABLE();
    }
    called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    called_method.dex_file = called->GetDexFile();
    called_method.dex_method_index = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      called_method.dex_file->GetMethodShorty(
          called_method.dex_file->GetMethodId(called_method.dex_method_index), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (!called_method_known_on_entry) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
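    // Resolution below can allocate and thus trigger GC; the HandleWrapper above keeps the
    // receiver (when there is one) up to date if it moves.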
Mathieu Chartiere401d142015-04-22 13:56:20 -0700955 called = linker->ResolveMethod(self, called_method.dex_method_index, caller, invoke_type);
Ian Rogers848871b2013-08-05 10:56:33 -0700956 }
Ian Rogerse0a02da2014-12-02 14:10:53 -0800957 const void* code = nullptr;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800958 if (LIKELY(!self->IsExceptionPending())) {
Ian Rogers848871b2013-08-05 10:56:33 -0700959 // Incompatible class change should have been handled in resolve method.
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800960 CHECK(!called->CheckIncompatibleClassChange(invoke_type))
961 << PrettyMethod(called) << " " << invoke_type;
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800962 if (virtual_or_interface) {
963 // Refine called method based on receiver.
964 CHECK(receiver != nullptr) << invoke_type;
Mingyao Yangf4867782014-05-05 11:55:02 -0700965
Mathieu Chartiere401d142015-04-22 13:56:20 -0700966 ArtMethod* orig_called = called;
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800967 if (invoke_type == kVirtual) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700968 called = receiver->GetClass()->FindVirtualMethodForVirtual(called, sizeof(void*));
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800969 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700970 called = receiver->GetClass()->FindVirtualMethodForInterface(called, sizeof(void*));
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800971 }
Mingyao Yangf4867782014-05-05 11:55:02 -0700972
973 CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
974 << PrettyTypeOf(receiver) << " "
975 << invoke_type << " " << orig_called->GetVtableIndex();
976
Ian Rogers83883d72013-10-21 21:07:24 -0700977 // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
Ian Rogerse0a02da2014-12-02 14:10:53 -0800978 // of the sharpened method, avoiding dirtying the dex cache where possible.
Ian Rogers00f15272014-12-02 16:55:46 -0800979 // Note: called_method.dex_method_index references the dex method before the
980 // FindVirtualMethodFor... call. This is OK for FindDexMethodIndexInOtherDexFile, which only
981 // cares about the name and signature.
982 uint32_t update_dex_cache_method_index = called->GetDexMethodIndex();
Vladimir Marko05792b92015-08-03 11:56:49 +0100983 if (!called->HasSameDexCacheResolvedMethods(caller, sizeof(void*))) {
Ian Rogers83883d72013-10-21 21:07:24 -0700984 // Calling from one dex file to another, need to compute the method index appropriate to
Vladimir Markobbcc0c02014-02-03 14:08:42 +0000985 // the caller's dex file. Since we get here only if the original called was a runtime
986 // method, we've got the correct dex_file and a dex_method_idx from above.
Ian Rogerse0a02da2014-12-02 14:10:53 -0800987 DCHECK(!called_method_known_on_entry);
988 DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
989 const DexFile* caller_dex_file = called_method.dex_file;
990 uint32_t caller_method_name_and_sig_index = called_method.dex_method_index;
991 update_dex_cache_method_index =
992 called->FindDexMethodIndexInOtherDexFile(*caller_dex_file,
993 caller_method_name_and_sig_index);
994 }
995 if ((update_dex_cache_method_index != DexFile::kDexNoIndex) &&
Mathieu Chartiere401d142015-04-22 13:56:20 -0700996 (caller->GetDexCacheResolvedMethod(
997 update_dex_cache_method_index, sizeof(void*)) != called)) {
998 caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called, sizeof(void*));
Ian Rogers83883d72013-10-21 21:07:24 -0700999 }
Mathieu Chartiere4a91bb2015-01-28 13:11:44 -08001000 } else if (invoke_type == kStatic) {
1001 const auto called_dex_method_idx = called->GetDexMethodIndex();
1002 // For static invokes, we may dispatch to the static method in the superclass but resolve
1003 // using the subclass. To prevent getting slow paths on each invoke, we force set the
1004 // resolved method for the super class dex method index if we are in the same dex file.
1005 // b/19175856
1006 if (called->GetDexFile() == called_method.dex_file &&
1007 called_method.dex_method_index != called_dex_method_idx) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001008 called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called, sizeof(void*));
Mathieu Chartiere4a91bb2015-01-28 13:11:44 -08001009 }
Ian Rogers83883d72013-10-21 21:07:24 -07001010 }
Daniel Mihalyieb076692014-08-22 17:33:31 +02001011
Ian Rogers848871b2013-08-05 10:56:33 -07001012 // Ensure that the called method's class is initialized.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001013 StackHandleScope<1> hs(soa.Self());
1014 Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
Ian Rogers7b078e82014-09-10 14:44:24 -07001015 linker->EnsureInitialized(soa.Self(), called_class, true, true);
Ian Rogers848871b2013-08-05 10:56:33 -07001016 if (LIKELY(called_class->IsInitialized())) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001017 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1018 // If we are single-stepping or the called method is deoptimized (by a
1019 // breakpoint, for example), then we have to execute the called method
1020 // with the interpreter.
1021 code = GetQuickToInterpreterBridge();
1022 } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
1023 // If the caller is deoptimized (by a breakpoint, for example), we have to
1024 // continue its execution with interpreter when returning from the called
1025 // method. Because we do not want to execute the called method with the
1026 // interpreter, we wrap its execution into the instrumentation stubs.
1027 // When the called method returns, it will execute the instrumentation
1028 // exit hook that will determine the need of the interpreter with a call
1029 // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
1030 // it is needed.
1031 code = GetQuickInstrumentationEntryPoint();
1032 } else {
1033 code = called->GetEntryPointFromQuickCompiledCode();
1034 }
Ian Rogers848871b2013-08-05 10:56:33 -07001035 } else if (called_class->IsInitializing()) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001036 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1037 // If we are single-stepping or the called method is deoptimized (by a
1038 // breakpoint, for example), then we have to execute the called method
1039 // with the interpreter.
1040 code = GetQuickToInterpreterBridge();
1041 } else if (invoke_type == kStatic) {
Ian Rogers848871b2013-08-05 10:56:33 -07001042 // Class is still initializing, go to oat and grab code (trampoline must be left in place
1043 // until class is initialized to stop races between threads).
Ian Rogersef7d42f2014-01-06 12:55:46 -08001044 code = linker->GetQuickOatCodeFor(called);
Ian Rogers848871b2013-08-05 10:56:33 -07001045 } else {
1046 // No trampoline for non-static methods.
Ian Rogersef7d42f2014-01-06 12:55:46 -08001047 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -07001048 }
1049 } else {
1050 DCHECK(called_class->IsErroneous());
1051 }
1052 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001053 CHECK_EQ(code == nullptr, self->IsExceptionPending());
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001054 // Fix up any locally saved objects that may have moved during a GC.
1055 visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -07001056 // Place the called method in the callee-save frame so it becomes the first argument to the quick method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001057 *sp = called;
1058
Ian Rogers848871b2013-08-05 10:56:33 -07001059 return code;
1060}
1061
Andreas Gampec147b002014-03-06 18:11:06 -08001062/*
1063 * This class uses a couple of observations to unite the different calling conventions through
1064 * a few constants.
1065 *
1066 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
1067 * possible alignment.
1068 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
1069 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
1070 * when we have to split things.
1071 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
1072 * and we can use Int handling directly.
1073 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
1074 * necessary when widening. Also, widening of Ints will take place implicitly, and the
1075 * extension should be compatible with Aarch64, which mandates copying the available bits
1076 * into LSB and leaving the rest unspecified.
1077 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
1078 * the stack.
1079 * 6) There is only little endian.
1080 *
1081 *
1082 * Actual work is supposed to be done in a delegate of the template type. The interface is as
1083 * follows:
1084 *
1085 * void PushGpr(uintptr_t): Add a value for the next GPR
1086 *
1087 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
1088 * padding, that is, think the architecture is 32b and aligns 64b.
1089 *
1090 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
1091 * split this if necessary. The current state will have aligned, if
1092 * necessary.
1093 *
1094 * void PushStack(uintptr_t): Push a value to the stack.
1095 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001096 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001097 * called with nullptr, as null references may need explicit initialization.
Andreas Gampec147b002014-03-06 18:11:06 -08001098 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001099 * entry in the HandleScope (nullptr if necessary).
 * A minimal example delegate implementing this interface is sketched after the class below.
Andreas Gampec147b002014-03-06 18:11:06 -08001100 *
1101 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001102template<class T> class BuildNativeCallFrameStateMachine {
Andreas Gampec147b002014-03-06 18:11:06 -08001103 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001104#if defined(__arm__)
1105 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -08001106 static constexpr bool kNativeSoftFloatAbi = true;
1107 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001108 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1109
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001110 static constexpr size_t kRegistersNeededForLong = 2;
1111 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001112 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001113 static constexpr bool kMultiFPRegistersWidened = false;
1114 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001115 static constexpr bool kAlignLongOnStack = true;
1116 static constexpr bool kAlignDoubleOnStack = true;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001117#elif defined(__aarch64__)
1118 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1119 static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs.
1120 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1121
1122 static constexpr size_t kRegistersNeededForLong = 1;
1123 static constexpr size_t kRegistersNeededForDouble = 1;
1124 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001125 static constexpr bool kMultiFPRegistersWidened = false;
1126 static constexpr bool kMultiGPRegistersWidened = false;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001127 static constexpr bool kAlignLongOnStack = false;
1128 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001129#elif defined(__mips__) && !defined(__LP64__)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001130 static constexpr bool kNativeSoftFloatAbi = true; // Treated as a soft float ABI: native arguments are passed in GPRs.
Douglas Leung735b8552014-10-31 12:21:40 -07001131 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs.
1132 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001133
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001134 static constexpr size_t kRegistersNeededForLong = 2;
1135 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001136 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001137 static constexpr bool kMultiFPRegistersWidened = true;
1138 static constexpr bool kMultiGPRegistersWidened = false;
Douglas Leung735b8552014-10-31 12:21:40 -07001139 static constexpr bool kAlignLongOnStack = true;
1140 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001141#elif defined(__mips__) && defined(__LP64__)
1142 // Let the code prepare GPRs only and we will load the FPRs with same data.
1143 static constexpr bool kNativeSoftFloatAbi = true;
1144 static constexpr size_t kNumNativeGprArgs = 8;
1145 static constexpr size_t kNumNativeFprArgs = 0;
1146
1147 static constexpr size_t kRegistersNeededForLong = 1;
1148 static constexpr size_t kRegistersNeededForDouble = 1;
1149 static constexpr bool kMultiRegistersAligned = false;
1150 static constexpr bool kMultiFPRegistersWidened = false;
1151 static constexpr bool kMultiGPRegistersWidened = true;
1152 static constexpr bool kAlignLongOnStack = false;
1153 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001154#elif defined(__i386__)
1155 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -08001156 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001157 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs.
1158 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1159
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001160 static constexpr size_t kRegistersNeededForLong = 2;
1161 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001162 static constexpr bool kMultiRegistersAligned = false; // x86 not using regs, anyways
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001163 static constexpr bool kMultiFPRegistersWidened = false;
1164 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001165 static constexpr bool kAlignLongOnStack = false;
1166 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001167#elif defined(__x86_64__)
1168 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1169 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
1170 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1171
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001172 static constexpr size_t kRegistersNeededForLong = 1;
1173 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -08001174 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001175 static constexpr bool kMultiFPRegistersWidened = false;
1176 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001177 static constexpr bool kAlignLongOnStack = false;
1178 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001179#else
1180#error "Unsupported architecture"
1181#endif
1182
Andreas Gampec147b002014-03-06 18:11:06 -08001183 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001184 explicit BuildNativeCallFrameStateMachine(T* delegate)
1185 : gpr_index_(kNumNativeGprArgs),
1186 fpr_index_(kNumNativeFprArgs),
1187 stack_entries_(0),
1188 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -08001189 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
1190 // the next register is even; counting down is just to make the compiler happy...
Andreas Gampe575e78c2014-11-03 23:41:03 -08001191 static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
1192 static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
Andreas Gampec147b002014-03-06 18:11:06 -08001193 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001194
Andreas Gampec200a4a2014-06-16 18:39:09 -07001195 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001196
Ian Rogers1428dce2014-10-21 15:02:15 -07001197 bool HavePointerGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001198 return gpr_index_ > 0;
1199 }
1200
Andreas Gampec200a4a2014-06-16 18:39:09 -07001201 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -08001202 if (HavePointerGpr()) {
1203 gpr_index_--;
1204 PushGpr(reinterpret_cast<uintptr_t>(val));
1205 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001206 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -08001207 PushStack(reinterpret_cast<uintptr_t>(val));
1208 gpr_index_ = 0;
1209 }
1210 }
1211
Ian Rogers1428dce2014-10-21 15:02:15 -07001212 bool HaveHandleScopeGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001213 return gpr_index_ > 0;
1214 }
1215
Mathieu Chartier90443472015-07-16 20:32:27 -07001216 void AdvanceHandleScope(mirror::Object* ptr) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001217 uintptr_t handle = PushHandle(ptr);
1218 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -08001219 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001220 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001221 } else {
1222 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001223 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001224 gpr_index_ = 0;
1225 }
1226 }
1227
Ian Rogers1428dce2014-10-21 15:02:15 -07001228 bool HaveIntGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001229 return gpr_index_ > 0;
1230 }
1231
1232 void AdvanceInt(uint32_t val) {
1233 if (HaveIntGpr()) {
1234 gpr_index_--;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001235 if (kMultiGPRegistersWidened) {
1236 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001237 PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001238 } else {
1239 PushGpr(val);
1240 }
Andreas Gampec147b002014-03-06 18:11:06 -08001241 } else {
1242 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001243 if (kMultiGPRegistersWidened) {
1244 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001245 PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001246 } else {
1247 PushStack(val);
1248 }
Andreas Gampec147b002014-03-06 18:11:06 -08001249 gpr_index_ = 0;
1250 }
1251 }
1252
Ian Rogers1428dce2014-10-21 15:02:15 -07001253 bool HaveLongGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001254 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
1255 }
1256
Ian Rogers1428dce2014-10-21 15:02:15 -07001257 bool LongGprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001258 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1259 kAlignLongOnStack && // and when it needs alignment
1260 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1261 }
1262
Ian Rogers1428dce2014-10-21 15:02:15 -07001263 bool LongStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001264 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1265 kAlignLongOnStack && // and when it needs 8B alignment
1266 (stack_entries_ & 1) == 1; // counter is odd
1267 }
1268
1269 void AdvanceLong(uint64_t val) {
1270 if (HaveLongGpr()) {
1271 if (LongGprNeedsPadding()) {
1272 PushGpr(0);
1273 gpr_index_--;
1274 }
1275 if (kRegistersNeededForLong == 1) {
1276 PushGpr(static_cast<uintptr_t>(val));
1277 } else {
1278 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1279 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1280 }
1281 gpr_index_ -= kRegistersNeededForLong;
1282 } else {
1283 if (LongStackNeedsPadding()) {
1284 PushStack(0);
1285 stack_entries_++;
1286 }
1287 if (kRegistersNeededForLong == 1) {
1288 PushStack(static_cast<uintptr_t>(val));
1289 stack_entries_++;
1290 } else {
1291 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1292 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1293 stack_entries_ += 2;
1294 }
1295 gpr_index_ = 0;
1296 }
1297 }
1298
Ian Rogers1428dce2014-10-21 15:02:15 -07001299 bool HaveFloatFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001300 return fpr_index_ > 0;
1301 }
1302
Andreas Gampec147b002014-03-06 18:11:06 -08001303 void AdvanceFloat(float val) {
1304 if (kNativeSoftFloatAbi) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001305 AdvanceInt(bit_cast<uint32_t, float>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001306 } else {
1307 if (HaveFloatFpr()) {
1308 fpr_index_--;
1309 if (kRegistersNeededForDouble == 1) {
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001310 if (kMultiFPRegistersWidened) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001311 PushFpr8(bit_cast<uint64_t, double>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001312 } else {
1313 // No widening, just use the bits.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001314 PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001315 }
1316 } else {
1317 PushFpr4(val);
1318 }
1319 } else {
1320 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001321 if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
Andreas Gampec147b002014-03-06 18:11:06 -08001322 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001323 // Note: We need to jump through those hoops to make the compiler happy.
1324 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001325 PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001326 } else {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001327 PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001328 }
1329 fpr_index_ = 0;
1330 }
1331 }
1332 }
1333
Ian Rogers1428dce2014-10-21 15:02:15 -07001334 bool HaveDoubleFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001335 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1336 }
1337
Ian Rogers1428dce2014-10-21 15:02:15 -07001338 bool DoubleFprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001339 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1340 kAlignDoubleOnStack && // and when it needs alignment
1341 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1342 }
1343
Ian Rogers1428dce2014-10-21 15:02:15 -07001344 bool DoubleStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001345 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1346 kAlignDoubleOnStack && // and when it needs 8B alignment
1347 (stack_entries_ & 1) == 1; // counter is odd
1348 }
1349
1350 void AdvanceDouble(uint64_t val) {
1351 if (kNativeSoftFloatAbi) {
1352 AdvanceLong(val);
1353 } else {
1354 if (HaveDoubleFpr()) {
1355 if (DoubleFprNeedsPadding()) {
1356 PushFpr4(0);
1357 fpr_index_--;
1358 }
1359 PushFpr8(val);
1360 fpr_index_ -= kRegistersNeededForDouble;
1361 } else {
1362 if (DoubleStackNeedsPadding()) {
1363 PushStack(0);
1364 stack_entries_++;
1365 }
1366 if (kRegistersNeededForDouble == 1) {
1367 PushStack(static_cast<uintptr_t>(val));
1368 stack_entries_++;
1369 } else {
1370 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1371 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1372 stack_entries_ += 2;
1373 }
1374 fpr_index_ = 0;
1375 }
1376 }
1377 }
1378
Ian Rogers1428dce2014-10-21 15:02:15 -07001379 uint32_t GetStackEntries() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001380 return stack_entries_;
1381 }
1382
Ian Rogers1428dce2014-10-21 15:02:15 -07001383 uint32_t GetNumberOfUsedGprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001384 return kNumNativeGprArgs - gpr_index_;
1385 }
1386
Ian Rogers1428dce2014-10-21 15:02:15 -07001387 uint32_t GetNumberOfUsedFprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001388 return kNumNativeFprArgs - fpr_index_;
1389 }
1390
1391 private:
1392 void PushGpr(uintptr_t val) {
1393 delegate_->PushGpr(val);
1394 }
1395 void PushFpr4(float val) {
1396 delegate_->PushFpr4(val);
1397 }
1398 void PushFpr8(uint64_t val) {
1399 delegate_->PushFpr8(val);
1400 }
1401 void PushStack(uintptr_t val) {
1402 delegate_->PushStack(val);
1403 }
Mathieu Chartier90443472015-07-16 20:32:27 -07001404 uintptr_t PushHandle(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001405 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001406 }
1407
1408 uint32_t gpr_index_; // Number of free GPRs
1409 uint32_t fpr_index_; // Number of free FPRs
1410 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1411 // extended
Ian Rogers1428dce2014-10-21 15:02:15 -07001412 T* const delegate_; // What Push implementation gets called
Andreas Gampec147b002014-03-06 18:11:06 -08001413};
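
// Illustrative sketch only (not part of the runtime): a minimal delegate satisfying the
// template interface described above. It merely counts how many values of each kind the
// state machine hands out; the class name is an assumption introduced for this example.
class CountingNativeCallDelegate {
 public:
  CountingNativeCallDelegate() : gprs_(0), fprs_(0), stack_(0), handles_(0) {}

  void PushGpr(uintptr_t /* val */) { gprs_++; }
  void PushFpr4(float /* val */) { fprs_++; }
  void PushFpr8(uint64_t /* val */) { fprs_++; }
  void PushStack(uintptr_t /* val */) { stack_++; }
  uintptr_t PushHandle(mirror::Object* /* ref */) {
    handles_++;
    return reinterpret_cast<uintptr_t>(nullptr);  // No real handle-scope entry in this sketch.
  }

  size_t gprs_, fprs_, stack_, handles_;
};
// Driving BuildNativeCallFrameStateMachine<CountingNativeCallDelegate> with Advance* calls routes
// every argument into one of these counters, which is essentially what ComputeNativeCallFrameSize
// below does to size the out-args area.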
1414
Andreas Gampec200a4a2014-06-16 18:39:09 -07001415// Computes the sizes of register stacks and call stack area. Handling of references can be extended
1416// in subclasses.
1417//
1418// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1419// them with handles.
1420class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001421 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001422 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1423
1424 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001425
Ian Rogers1428dce2014-10-21 15:02:15 -07001426 uint32_t GetStackSize() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001427 return num_stack_entries_ * sizeof(uintptr_t);
1428 }
1429
Ian Rogers1428dce2014-10-21 15:02:15 -07001430 uint8_t* LayoutCallStack(uint8_t* sp8) const {
Andreas Gampec147b002014-03-06 18:11:06 -08001431 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001432 // Align by kStackAlignment.
1433 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001434 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001435 }
1436
Ian Rogers1428dce2014-10-21 15:02:15 -07001437 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
1438 const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001439 // Assumption is OK right now, as we have soft-float arm
1440 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1441 sp8 -= fregs * sizeof(uintptr_t);
1442 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1443 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1444 sp8 -= iregs * sizeof(uintptr_t);
1445 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1446 return sp8;
1447 }
Andreas Gampec147b002014-03-06 18:11:06 -08001448
Andreas Gampec200a4a2014-06-16 18:39:09 -07001449 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
Ian Rogers1428dce2014-10-21 15:02:15 -07001450 uint32_t** start_fpr) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001451 // Native call stack.
1452 sp8 = LayoutCallStack(sp8);
1453 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001454
Andreas Gampec200a4a2014-06-16 18:39:09 -07001455 // Put fprs and gprs below.
1456 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001457
Andreas Gampec200a4a2014-06-16 18:39:09 -07001458 // Return the new bottom.
1459 return sp8;
1460 }
1461
1462 virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
Mathieu Chartier90443472015-07-16 20:32:27 -07001463 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001464 UNUSED(sm);
1465 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07001466
Mathieu Chartier90443472015-07-16 20:32:27 -07001467 void Walk(const char* shorty, uint32_t shorty_len) SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001468 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1469
1470 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001471
1472 for (uint32_t i = 1; i < shorty_len; ++i) {
1473 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1474 switch (cur_type_) {
1475 case Primitive::kPrimNot:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001476 // TODO: fix abuse of mirror types.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001477 sm.AdvanceHandleScope(
1478 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001479 break;
1480
1481 case Primitive::kPrimBoolean:
1482 case Primitive::kPrimByte:
1483 case Primitive::kPrimChar:
1484 case Primitive::kPrimShort:
1485 case Primitive::kPrimInt:
1486 sm.AdvanceInt(0);
1487 break;
1488 case Primitive::kPrimFloat:
1489 sm.AdvanceFloat(0);
1490 break;
1491 case Primitive::kPrimDouble:
1492 sm.AdvanceDouble(0);
1493 break;
1494 case Primitive::kPrimLong:
1495 sm.AdvanceLong(0);
1496 break;
1497 default:
1498 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001499 UNREACHABLE();
Andreas Gampec147b002014-03-06 18:11:06 -08001500 }
1501 }
1502
Ian Rogers1428dce2014-10-21 15:02:15 -07001503 num_stack_entries_ = sm.GetStackEntries();
Andreas Gampec147b002014-03-06 18:11:06 -08001504 }
1505
1506 void PushGpr(uintptr_t /* val */) {
1507 // not optimizing registers, yet
1508 }
1509
1510 void PushFpr4(float /* val */) {
1511 // not optimizing registers, yet
1512 }
1513
1514 void PushFpr8(uint64_t /* val */) {
1515 // not optimizing registers, yet
1516 }
1517
1518 void PushStack(uintptr_t /* val */) {
1519 // counting is already done in the superclass
1520 }
1521
Andreas Gampec200a4a2014-06-16 18:39:09 -07001522 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08001523 return reinterpret_cast<uintptr_t>(nullptr);
1524 }
1525
Andreas Gampec200a4a2014-06-16 18:39:09 -07001526 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08001527 uint32_t num_stack_entries_;
1528};
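
// Illustrative sketch only (not part of the runtime): sizing the native out-args area for a
// hypothetical native method with shorty "DJI" (returns double, takes a long and an int).
// The function name is an assumption introduced for this example.
size_t ExampleNativeOutArgsBytes() SHARED_REQUIRES(Locks::mutator_lock_) {
  ComputeNativeCallFrameSize fsc;
  fsc.Walk("DJI", 3);         // Index 0 is the return type; only 'J' and 'I' are visited.
  return fsc.GetStackSize();  // Bytes that go to the stack once argument registers are exhausted.
}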
1529
Andreas Gampec200a4a2014-06-16 18:39:09 -07001530class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001531 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001532 ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001533
Andreas Gampec200a4a2014-06-16 18:39:09 -07001534 // Lays out the callee-save frame. Assumes that the incorrect frame corresponding to RefsAndArgs
1535 // is at *m = sp. Will update *m to point to the bottom of the save frame.
1536 //
1537 // Note: assumes Walk() has been run beforehand.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001538 void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Mathieu Chartier90443472015-07-16 20:32:27 -07001539 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001540 ArtMethod* method = **m;
1541
1542 DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001543
Andreas Gampec200a4a2014-06-16 18:39:09 -07001544 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
1545
1546 // First, fix up the layout of the callee-save frame.
1547 // We have to squeeze in the HandleScope, and relocate the method pointer.
1548
1549 // "Free" the slot for the method.
Ian Rogers13735952014-10-08 12:43:28 -07001550 sp8 += sizeof(void*); // In the callee-save frame we use a full pointer.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001551
1552 // Under the callee saves put handle scope and new method stack reference.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001553 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001554 size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001555
1556 sp8 -= scope_and_method;
1557 // Align by kStackAlignment.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001558 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001559
Mathieu Chartiere401d142015-04-22 13:56:20 -07001560 uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
Ian Rogers59c07062014-10-10 13:03:39 -07001561 *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
1562 num_handle_scope_references_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001563
1564 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1565 uint8_t* method_pointer = sp8;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001566 auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
1567 *new_method_ref = method;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001568 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001569 }
1570
Andreas Gampec200a4a2014-06-16 18:39:09 -07001571 // Adds space for the cookie. Note: may leave stack unaligned.
Ian Rogers1428dce2014-10-21 15:02:15 -07001572 void LayoutCookie(uint8_t** sp) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001573 // Reference cookie and padding
1574 *sp -= 8;
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001575 }
1576
Andreas Gampec200a4a2014-06-16 18:39:09 -07001577 // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
1578 // Returns the new bottom. Note: this may be unaligned.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001579 uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Mathieu Chartier90443472015-07-16 20:32:27 -07001580 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001581 // First, fix up the layout of the callee-save frame.
1582 // We have to squeeze in the HandleScope, and relocate the method pointer.
Ian Rogers59c07062014-10-10 13:03:39 -07001583 LayoutCalleeSaveFrame(self, m, sp, handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001584
1585 // The bottom of the callee-save frame is now where the method is, *m.
1586 uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);
1587
1588 // Add space for cookie.
1589 LayoutCookie(&sp8);
1590
1591 return sp8;
1592 }
1593
1594 // WARNING: After this, *sp won't be pointing to the method anymore!
Mathieu Chartiere401d142015-04-22 13:56:20 -07001595 uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
1596 HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
1597 uint32_t** start_fpr)
Mathieu Chartier90443472015-07-16 20:32:27 -07001598 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001599 Walk(shorty, shorty_len);
1600
1601 // JNI part.
Ian Rogers59c07062014-10-10 13:03:39 -07001602 uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001603
1604 sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);
1605
1606 // Return the new bottom.
1607 return sp8;
1608 }
1609
1610 uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;
1611
1612 // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
1613 void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
Mathieu Chartier90443472015-07-16 20:32:27 -07001614 SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001615
1616 private:
1617 uint32_t num_handle_scope_references_;
1618};
1619
1620uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
1621 num_handle_scope_references_++;
1622 return reinterpret_cast<uintptr_t>(nullptr);
1623}
1624
1625void ComputeGenericJniFrameSize::WalkHeader(
1626 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
1627 // JNIEnv
1628 sm->AdvancePointer(nullptr);
1629
1630 // Class object or this as first argument
1631 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
1632}
1633
1634// Class to push values to three separate regions. Used to fill the native call part. Adheres to
1635// the template requirements of BuildNativeCallFrameStateMachine.
1636class FillNativeCall {
1637 public:
1638 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
1639 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
1640
1641 virtual ~FillNativeCall() {}
1642
1643 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
1644 cur_gpr_reg_ = gpr_regs;
1645 cur_fpr_reg_ = fpr_regs;
1646 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08001647 }
1648
1649 void PushGpr(uintptr_t val) {
1650 *cur_gpr_reg_ = val;
1651 cur_gpr_reg_++;
1652 }
1653
1654 void PushFpr4(float val) {
1655 *cur_fpr_reg_ = val;
1656 cur_fpr_reg_++;
1657 }
1658
1659 void PushFpr8(uint64_t val) {
1660 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1661 *tmp = val;
1662 cur_fpr_reg_ += 2;
1663 }
1664
1665 void PushStack(uintptr_t val) {
1666 *cur_stack_arg_ = val;
1667 cur_stack_arg_++;
1668 }
1669
Mathieu Chartier90443472015-07-16 20:32:27 -07001670 virtual uintptr_t PushHandle(mirror::Object*) SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001671 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001672 UNREACHABLE();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001673 }
1674
1675 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001676 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001677 uint32_t* cur_fpr_reg_;
1678 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001679};
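
// Illustrative sketch only (not part of the runtime): filling pre-allocated register and stack
// regions through the state machine. The buffer sizes and the function name are assumptions
// introduced for this example; the real regions are carved out of the reserved area below sp.
uint32_t ExampleFillNativeCallRegions() {
  uintptr_t gpr_regs[8];             // Generous fixed sizes for the sketch.
  alignas(8) uint32_t fpr_regs[16];  // PushFpr8() stores a double as two uint32_t slots.
  uintptr_t stack_args[16];
  FillNativeCall filler(gpr_regs, fpr_regs, stack_args);
  BuildNativeCallFrameStateMachine<FillNativeCall> sm(&filler);
  sm.AdvancePointer(nullptr);                         // E.g. the JNIEnv* slot.
  sm.AdvanceInt(42);                                  // A jint argument.
  sm.AdvanceDouble(bit_cast<uint64_t, double>(1.0));  // A jdouble argument.
  return sm.GetStackEntries();                        // How many values ended up on the stack.
}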
Andreas Gampec147b002014-03-06 18:11:06 -08001680
Andreas Gampec200a4a2014-06-16 18:39:09 -07001681// Visits arguments on the stack placing them into a region lower down the stack for the benefit
1682// of transitioning into native code.
1683class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
1684 public:
Ian Rogers59c07062014-10-10 13:03:39 -07001685 BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001686 ArtMethod*** sp)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001687 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
1688 jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
1689 ComputeGenericJniFrameSize fsc;
1690 uintptr_t* start_gpr_reg;
1691 uint32_t* start_fpr_reg;
1692 uintptr_t* start_stack_arg;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001693 bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
Ian Rogers59c07062014-10-10 13:03:39 -07001694 &handle_scope_,
1695 &start_stack_arg,
Andreas Gampec200a4a2014-06-16 18:39:09 -07001696 &start_gpr_reg, &start_fpr_reg);
1697
Andreas Gampec200a4a2014-06-16 18:39:09 -07001698 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
1699
1700 // The JNI environment is always the first argument.
1701 sm_.AdvancePointer(self->GetJniEnv());
1702
1703 if (is_static) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001704 sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
Andreas Gampec200a4a2014-06-16 18:39:09 -07001705 }
1706 }
1707
Mathieu Chartier90443472015-07-16 20:32:27 -07001708 void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001709
Mathieu Chartier90443472015-07-16 20:32:27 -07001710 void FinalizeHandleScope(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001711
1712 StackReference<mirror::Object>* GetFirstHandleScopeEntry()
Mathieu Chartier90443472015-07-16 20:32:27 -07001713 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001714 return handle_scope_->GetHandle(0).GetReference();
1715 }
1716
Mathieu Chartier90443472015-07-16 20:32:27 -07001717 jobject GetFirstHandleScopeJObject() const SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001718 return handle_scope_->GetHandle(0).ToJObject();
1719 }
1720
Ian Rogers1428dce2014-10-21 15:02:15 -07001721 void* GetBottomOfUsedArea() const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001722 return bottom_of_used_area_;
1723 }
1724
1725 private:
1726 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
1727 class FillJniCall FINAL : public FillNativeCall {
1728 public:
1729 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
1730 HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
1731 handle_scope_(handle_scope), cur_entry_(0) {}
1732
Mathieu Chartier90443472015-07-16 20:32:27 -07001733 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001734
1735 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
1736 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
1737 handle_scope_ = scope;
1738 cur_entry_ = 0U;
1739 }
1740
Mathieu Chartier90443472015-07-16 20:32:27 -07001741 void ResetRemainingScopeSlots() SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001742 // Initialize padding entries.
1743 size_t expected_slots = handle_scope_->NumberOfReferences();
1744 while (cur_entry_ < expected_slots) {
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001745 handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001746 }
1747 DCHECK_NE(cur_entry_, 0U);
1748 }
1749
1750 private:
1751 HandleScope* handle_scope_;
1752 size_t cur_entry_;
1753 };
1754
1755 HandleScope* handle_scope_;
1756 FillJniCall jni_call_;
1757 void* bottom_of_used_area_;
1758
1759 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001760
1761 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1762};
1763
Andreas Gampec200a4a2014-06-16 18:39:09 -07001764uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
1765 uintptr_t tmp;
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001766 MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001767 h.Assign(ref);
1768 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
1769 cur_entry_++;
1770 return tmp;
1771}
1772
Ian Rogers9758f792014-03-13 09:02:55 -07001773void BuildGenericJniFrameVisitor::Visit() {
1774 Primitive::Type type = GetParamPrimitiveType();
1775 switch (type) {
1776 case Primitive::kPrimLong: {
1777 jlong long_arg;
1778 if (IsSplitLongOrDouble()) {
1779 long_arg = ReadSplitLongParam();
1780 } else {
1781 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1782 }
1783 sm_.AdvanceLong(long_arg);
1784 break;
1785 }
1786 case Primitive::kPrimDouble: {
1787 uint64_t double_arg;
1788 if (IsSplitLongOrDouble()) {
1789 // Read the raw bits so that we don't cast to a double.
1790 double_arg = ReadSplitLongParam();
1791 } else {
1792 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1793 }
1794 sm_.AdvanceDouble(double_arg);
1795 break;
1796 }
1797 case Primitive::kPrimNot: {
1798 StackReference<mirror::Object>* stack_ref =
1799 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001800 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07001801 break;
1802 }
1803 case Primitive::kPrimFloat:
1804 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1805 break;
1806 case Primitive::kPrimBoolean: // Fall-through.
1807 case Primitive::kPrimByte: // Fall-through.
1808 case Primitive::kPrimChar: // Fall-through.
1809 case Primitive::kPrimShort: // Fall-through.
1810 case Primitive::kPrimInt: // Fall-through.
1811 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1812 break;
1813 case Primitive::kPrimVoid:
1814 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001815 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -07001816 }
1817}
1818
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001819void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001820 // Clear out rest of the scope.
1821 jni_call_.ResetRemainingScopeSlots();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001822 // Install HandleScope.
1823 self->PushHandleScope(handle_scope_);
Ian Rogers9758f792014-03-13 09:02:55 -07001824}
1825
Ian Rogers04c31d22014-07-07 21:44:06 -07001826#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001827extern "C" void* artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001828#else
1829extern "C" void* artFindNativeMethod(Thread* self);
1830#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001831
Andreas Gampead615172014-04-04 16:20:13 -07001832uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
1833 if (lock != nullptr) {
1834 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
1835 } else {
1836 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
1837 }
1838}
1839
1840void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
1841 if (lock != nullptr) {
1842 JniMethodEndSynchronized(cookie, lock, self);
1843 } else {
1844 JniMethodEnd(cookie, self);
1845 }
1846}
1847
Andreas Gampec147b002014-03-06 18:11:06 -08001848/*
1849 * Initializes an alloca region assumed to be directly below sp for a native call:
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001850 * Creates a HandleScope and a call stack, and fills a mini stack with values to be pushed to registers.
Andreas Gampec147b002014-03-06 18:11:06 -08001851 * The native code to invoke is returned to the stub together with the bottom of the reserved area.
1852 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001853 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001854 * We need to fix this, as the handle scope needs to go into the callee-save frame.
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001855 *
Andreas Gampec147b002014-03-06 18:11:06 -08001856 * The return of this function denotes:
1857 * 1) The bottom of the used area and the native code to call, on success (GetTwoWordSuccessValue).
1858 * 2) A failure value if an exception is pending (GetTwoWordFailureValue).
1859 */
Mathieu Chartiere401d142015-04-22 13:56:20 -07001860extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
Mathieu Chartier90443472015-07-16 20:32:27 -07001861 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001862 ArtMethod* called = *sp;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001863 DCHECK(called->IsNative()) << PrettyMethod(called, true);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001864 uint32_t shorty_len = 0;
1865 const char* shorty = called->GetShorty(&shorty_len);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001866
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001867 // Run the visitor and update sp.
Ian Rogers59c07062014-10-10 13:03:39 -07001868 BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001869 visitor.VisitArguments();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001870 visitor.FinalizeHandleScope(self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001871
Andreas Gampec200a4a2014-06-16 18:39:09 -07001872 // Fix up managed-stack things in Thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001873 self->SetTopOfStack(sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001874
Ian Rogerse0dcd462014-03-08 15:21:04 -08001875 self->VerifyStack();
1876
Andreas Gampe90546832014-03-12 18:07:19 -07001877 // Start JNI, save the cookie.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001878 uint32_t cookie;
1879 if (called->IsSynchronized()) {
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001880 cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001881 if (self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001882 self->PopHandleScope();
Andreas Gampec147b002014-03-06 18:11:06 -08001883 // The failure value denotes an error.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001884 return GetTwoWordFailureValue();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001885 }
1886 } else {
1887 cookie = JniMethodStart(self);
1888 }
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001889 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Ian Rogerse0dcd462014-03-08 15:21:04 -08001890 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001891
Andreas Gampe90546832014-03-12 18:07:19 -07001892 // Retrieve the stored native code.
Mathieu Chartier2d721012014-11-10 11:08:06 -08001893 void* nativeCode = called->GetEntryPointFromJni();
Andreas Gampe90546832014-03-12 18:07:19 -07001894
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07001895 // There are two cases for the content of nativeCode:
1896 // 1) Pointer to the native function.
1897 // 2) Pointer to the trampoline for native code binding.
1898 // In the second case, we need to execute the binding and continue with the actual native function
1899 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07001900 DCHECK(nativeCode != nullptr);
1901 if (nativeCode == GetJniDlsymLookupStub()) {
Ian Rogers04c31d22014-07-07 21:44:06 -07001902#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001903 nativeCode = artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001904#else
1905 nativeCode = artFindNativeMethod(self);
1906#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001907
1908 if (nativeCode == nullptr) {
1909 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07001910
1911 // End JNI, as the assembly will move to deliver the exception.
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001912 jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001913 if (shorty[0] == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001914 artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
1915 } else {
1916 artQuickGenericJniEndJNINonRef(self, cookie, lock);
1917 }
1918
Andreas Gampec200a4a2014-06-16 18:39:09 -07001919 return GetTwoWordFailureValue();
Andreas Gampe90546832014-03-12 18:07:19 -07001920 }
1921 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001922 }
1923
Andreas Gampec200a4a2014-06-16 18:39:09 -07001924 // Return native code addr(lo) and bottom of alloca address(hi).
1925 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
1926 reinterpret_cast<uintptr_t>(nativeCode));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001927}
1928
1929/*
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001930 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001931 * unlocking.
1932 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001933extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
Mathieu Chartier90443472015-07-16 20:32:27 -07001934 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001935 ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001936 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001937 ArtMethod* called = *sp;
Ian Rogerse0dcd462014-03-08 15:21:04 -08001938 uint32_t cookie = *(sp32 - 1);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001939
Andreas Gampead615172014-04-04 16:20:13 -07001940 jobject lock = nullptr;
1941 if (called->IsSynchronized()) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001942 HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001943 + sizeof(*sp));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001944 lock = table->GetHandle(0).ToJObject();
Andreas Gampead615172014-04-04 16:20:13 -07001945 }
1946
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001947 char return_shorty_char = called->GetShorty()[0];
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001948
1949 if (return_shorty_char == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001950 return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001951 } else {
Andreas Gampead615172014-04-04 16:20:13 -07001952 artQuickGenericJniEndJNINonRef(self, cookie, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001953
1954 switch (return_shorty_char) {
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001955 case 'F': {
1956 if (kRuntimeISA == kX86) {
1957 // Convert the result back to float.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001958 double d = bit_cast<double, uint64_t>(result_f);
1959 return bit_cast<uint32_t, float>(static_cast<float>(d));
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001960 } else {
1961 return result_f;
1962 }
1963 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001964 case 'D':
1965 return result_f;
1966 case 'Z':
1967 return result.z;
1968 case 'B':
1969 return result.b;
1970 case 'C':
1971 return result.c;
1972 case 'S':
1973 return result.s;
1974 case 'I':
1975 return result.i;
1976 case 'J':
1977 return result.j;
1978 case 'V':
1979 return 0;
1980 default:
1981 LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
1982 return 0;
1983 }
1984 }
Andreas Gampe2da88232014-02-27 12:26:20 -08001985}
1986
Andreas Gamped58342c2014-06-05 14:18:08 -07001987// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
1988// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07001989//
Andreas Gamped58342c2014-06-05 14:18:08 -07001990// It is valid to use this, as at the usage points here (returns from C functions) we are assumed
Mathieu Chartier90443472015-07-16 20:32:27 -07001991// to hold the mutator lock (see the SHARED_REQUIRES(Locks::mutator_lock_) annotations).
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001992
1993template<InvokeType type, bool access_check>
Mathieu Chartiere401d142015-04-22 13:56:20 -07001994static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, Thread* self,
1995 ArtMethod** sp) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001996 ScopedQuickEntrypointChecks sqec(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001997 DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
1998 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
1999 ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, type);
  if (UNLIKELY(method == nullptr)) {
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, &this_object, caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please the analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
  template SHARED_REQUIRES(Locks::mutator_lock_) \
  TwoWordReturn artInvokeCommon<type, access_check>( \
      uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
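// For reference, an invocation such as EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false)
// below expands to an explicit instantiation of the form:
//
//   template SHARED_REQUIRES(Locks::mutator_lock_)
//   TwoWordReturn artInvokeCommon<kVirtual, false>(
//       uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp);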
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S
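// Each trampoline below is reached from its corresponding assembly stub and simply forwards to
// artInvokeCommon with the matching InvokeType; all of them request the access-checking variant.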
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}

// Determine the target of an interface dispatch. The receiver object (this_object) is known to
// be non-null.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(uint32_t dex_method_idx,
                                                      mirror::Object* this_object,
                                                      Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  // The optimizing compiler currently does not inline methods that have an interface
  // invocation. We use the outer method directly to avoid fetching a stack map, which is
  // more expensive.
  ArtMethod* caller_method = QuickArgumentVisitor::GetOuterMethod(sp);
  DCHECK_EQ(caller_method, QuickArgumentVisitor::GetCallingMethod(sp));
  ArtMethod* interface_method = caller_method->GetDexCacheResolvedMethod(
      dex_method_idx, sizeof(void*));
  DCHECK(interface_method != nullptr) << dex_method_idx << " " << PrettyMethod(caller_method);
  ArtMethod* method;
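  // If the caller's dex cache already holds the resolved interface method, the concrete
  // implementation can be looked up directly through the receiver's class; otherwise the cache
  // slot still holds the runtime's resolution method and the callee is resolved the slow way.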
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(
        interface_method, sizeof(void*));
    if (UNLIKELY(method == nullptr)) {
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
          interface_method, this_object, caller_method);
      return GetTwoWordFailureValue();  // Failure.
    }
  } else {
    DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod());
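    // In debug builds, double-check that the caller really is at an invoke-interface (or
    // invoke-interface/range) instruction whose method index matches the one we were given.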
    if (kIsDebugBuild) {
      uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
      const DexFile::CodeItem* code = caller_method->GetCodeItem();
      CHECK_LT(dex_pc, code->insns_size_in_code_units_);
      const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
      Instruction::Code instr_code = instr->Opcode();
      CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
            instr_code == Instruction::INVOKE_INTERFACE_RANGE)
          << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
      if (instr_code == Instruction::INVOKE_INTERFACE) {
        CHECK_EQ(dex_method_idx, instr->VRegB_35c());
      } else {
        CHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
        CHECK_EQ(dex_method_idx, instr->VRegB_3rc());
      }
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
                                                   &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

}  // namespace art