/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "method_reference.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "stack.h"
#include "debugger.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kAlignPairRegister = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSoftFloatAbi = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = kArm32QuickCodeUseSoftFloat ? 0 : 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F15        |
  // | F14        |    f_arg1
  // | F13        |
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 2 arguments passed in FPRs. Floats can be passed
                                                 // only in even numbered registers and each double
                                                 // occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 32;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 76;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |    f_arg7
  // | F18        |    f_arg6
  // | F17        |    f_arg5
  // | F16        |    f_arg4
  // | F15        |    f_arg3
  // | F14        |    f_arg2
  // | F13        |    f_arg1
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for Mip64, when A0 is skipped, F0 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
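  // Descriptive note: on x86-64 the argument GPRs are not spilled in argument order, because the
  // callee saves RBX and RBP sit between them in the frame (see the layout above). Map the logical
  // argument index to its spill slot relative to the first GPR slot (RCX).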
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static mirror::Object* GetProxyThisObject(ArtMethod** sp)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, (*sp)->GetFrameSizeInBytes());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr();
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, Runtime::kRefsAndArgs);
  }

  static ArtMethod* GetOuterMethod(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

  static uint32_t GetCallingDexPc(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    ArtMethod* outer_method = *caller_sp;
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    uintptr_t outer_pc_offset = outer_method->NativeQuickPcOffset(outer_pc);

    if (outer_method->IsOptimized(sizeof(void*))) {
      CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
      StackMapEncoding encoding = code_info.ExtractEncoding();
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset, encoding);
      DCHECK(stack_map.IsValid());
      if (stack_map.HasInlineInfo(encoding)) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
        return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
      } else {
        return stack_map.GetDexPc(encoding);
      }
    } else {
      return outer_method->ToDexPc(outer_pc);
    }
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_REQUIRES(Locks::mutator_lock_) :
          is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
          gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
          fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
          stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                      + sizeof(ArtMethod*)),  // Skip ArtMethod*.
          gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
          cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want to assume that counters(fpr_double_index_) are even if the
    // next register is even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
  }
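
  // Usage sketch (visitor name hypothetical): subclasses implement Visit() and drive iteration
  // with VisitArguments(), which calls Visit() once per argument with cur_type_ and the
  // register-spill or stack address already set up, e.g.:
  //   SomeArgVisitor visitor(sp, is_static, shorty, shorty_len);
  //   visitor.VisitArguments();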

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The splitted long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }
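
  // With kGprFprLockstep (the mips64 configuration above), GPR and FPR argument registers advance
  // together, so bumping one index also bumps the other; on the other ISAs these helpers are
  // plain increments.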

  void VisitArguments() SHARED_REQUIRES(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the first method's argument
    // (b) whatever the argument type it is, the 'stack_index_' should
    //     be moved forward along with every visiting.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong && kAlignPairRegister && gpr_index_ == 0) {
              // Currently, this is only for ARM and MIPS, where the first available parameter
              // register is R1 (on ARM) or A1 (on MIPS). So we skip it, and use R2 (on ARM) or
              // A2 (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }
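
  // Example (sketch, using the x86 configuration above): for a static method with shorty "VIF",
  // the int argument is located at the spilled ECX slot (gpr_index_ 0) and the float at the
  // spilled XMM0 slot (fpr_index_ 0), while stack_index_ still ends at 2 because an out slot is
  // reserved for every argument.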

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;  // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;  // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed in
  // terms of singles, may be behind fpr_index.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// allows to use the QuickArgumentVisitor constants without moving all the code in its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObject(sp);
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

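// Bridge into the interpreter: builds a shadow frame from the quick arguments, interprets the
// method, and (if the debugger forces interpretation of the caller) requests a deoptimization
// before returning the result.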
Mathieu Chartiere401d142015-04-22 13:56:20 -0700641extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
Mathieu Chartier90443472015-07-16 20:32:27 -0700642 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers848871b2013-08-05 10:56:33 -0700643 // Ensure we don't get thread suspension until the object arguments are safely in the shadow
644 // frame.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700645 ScopedQuickEntrypointChecks sqec(self);
Ian Rogers848871b2013-08-05 10:56:33 -0700646
647 if (method->IsAbstract()) {
648 ThrowAbstractMethodError(method);
649 return 0;
650 } else {
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800651 DCHECK(!method->IsNative()) << PrettyMethod(method);
Andreas Gampec200a4a2014-06-16 18:39:09 -0700652 const char* old_cause = self->StartAssertNoThreadSuspension(
653 "Building interpreter shadow frame");
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700654 const DexFile::CodeItem* code_item = method->GetCodeItem();
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800655 DCHECK(code_item != nullptr) << PrettyMethod(method);
Ian Rogers848871b2013-08-05 10:56:33 -0700656 uint16_t num_regs = code_item->registers_size_;
Andreas Gampec200a4a2014-06-16 18:39:09 -0700657 // No last shadow coming from quick.
Andreas Gampeb3025922015-09-01 14:45:00 -0700658 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
659 CREATE_SHADOW_FRAME(num_regs, nullptr, method, 0);
660 ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
Ian Rogers848871b2013-08-05 10:56:33 -0700661 size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700662 uint32_t shorty_len = 0;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700663 auto* non_proxy_method = method->GetInterfaceMethodIfProxy(sizeof(void*));
664 const char* shorty = non_proxy_method->GetShorty(&shorty_len);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700665 BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
Ian Rogers936b37f2014-02-14 00:52:24 -0800666 shadow_frame, first_arg_reg);
Ian Rogers848871b2013-08-05 10:56:33 -0700667 shadow_frame_builder.VisitArguments();
Ian Rogerse94652f2014-12-02 11:13:19 -0800668 const bool needs_initialization =
669 method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
Ian Rogers848871b2013-08-05 10:56:33 -0700670 // Push a transition back into managed code onto the linked list in thread.
671 ManagedStack fragment;
672 self->PushManagedStackFragment(&fragment);
673 self->PushShadowFrame(shadow_frame);
674 self->EndAssertNoThreadSuspension(old_cause);
675
Ian Rogerse94652f2014-12-02 11:13:19 -0800676 if (needs_initialization) {
Ian Rogers848871b2013-08-05 10:56:33 -0700677 // Ensure static method's class is initialized.
Ian Rogerse94652f2014-12-02 11:13:19 -0800678 StackHandleScope<1> hs(self);
679 Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
Ian Rogers7b078e82014-09-10 14:44:24 -0700680 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
Ian Rogerse94652f2014-12-02 11:13:19 -0800681 DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(shadow_frame->GetMethod());
Ian Rogers848871b2013-08-05 10:56:33 -0700682 self->PopManagedStackFragment(fragment);
683 return 0;
684 }
685 }
Ian Rogerse94652f2014-12-02 11:13:19 -0800686 JValue result = interpreter::EnterInterpreterFromEntryPoint(self, code_item, shadow_frame);
Ian Rogers848871b2013-08-05 10:56:33 -0700687 // Pop transition.
688 self->PopManagedStackFragment(fragment);
Daniel Mihalyieb076692014-08-22 17:33:31 +0200689
690 // Request a stack deoptimization if needed
Mathieu Chartiere401d142015-04-22 13:56:20 -0700691 ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
Daniel Mihalyieb076692014-08-22 17:33:31 +0200692 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
Sebastien Hertz07474662015-08-25 15:12:33 +0000693 // Push the context of the deoptimization stack so we can restore the return value and the
694 // exception before executing the deoptimized frames.
695 self->PushDeoptimizationContext(result, shorty[0] == 'L', self->GetException());
696
697 // Set special exception to cause deoptimization.
Daniel Mihalyieb076692014-08-22 17:33:31 +0200698 self->SetException(Thread::GetDeoptimizationException());
Daniel Mihalyieb076692014-08-22 17:33:31 +0200699 }
700
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800701 // No need to restore the args since the method has already been run by the interpreter.
Ian Rogers848871b2013-08-05 10:56:33 -0700702 return result.GetJ();
703 }
704}
705
706// Visits arguments on the stack placing them into the args vector, Object* arguments are converted
707// to jobjects.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800708class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -0700709 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -0700710 BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
Andreas Gampecf4035a2014-05-28 22:43:01 -0700711 ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
Andreas Gampec200a4a2014-06-16 18:39:09 -0700712 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}
Ian Rogers848871b2013-08-05 10:56:33 -0700713
Mathieu Chartier90443472015-07-16 20:32:27 -0700714 void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;
Ian Rogers848871b2013-08-05 10:56:33 -0700715
Mathieu Chartier90443472015-07-16 20:32:27 -0700716 void FixupReferences() SHARED_REQUIRES(Locks::mutator_lock_);
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800717
Ian Rogers848871b2013-08-05 10:56:33 -0700718 private:
Ian Rogers9758f792014-03-13 09:02:55 -0700719 ScopedObjectAccessUnchecked* const soa_;
720 std::vector<jvalue>* const args_;
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800721 // References which we must update when exiting in case the GC moved the objects.
Ian Rogers700a4022014-05-19 16:49:03 -0700722 std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;
Ian Rogers9758f792014-03-13 09:02:55 -0700723
Ian Rogers848871b2013-08-05 10:56:33 -0700724 DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
725};
726
Ian Rogers9758f792014-03-13 09:02:55 -0700727void BuildQuickArgumentVisitor::Visit() {
728 jvalue val;
729 Primitive::Type type = GetParamPrimitiveType();
730 switch (type) {
731 case Primitive::kPrimNot: {
732 StackReference<mirror::Object>* stack_ref =
733 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
734 val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
735 references_.push_back(std::make_pair(val.l, stack_ref));
736 break;
737 }
738 case Primitive::kPrimLong: // Fall-through.
739 case Primitive::kPrimDouble:
740 if (IsSplitLongOrDouble()) {
741 val.j = ReadSplitLongParam();
742 } else {
743 val.j = *reinterpret_cast<jlong*>(GetParamAddress());
744 }
745 break;
746 case Primitive::kPrimBoolean: // Fall-through.
747 case Primitive::kPrimByte: // Fall-through.
748 case Primitive::kPrimChar: // Fall-through.
749 case Primitive::kPrimShort: // Fall-through.
750 case Primitive::kPrimInt: // Fall-through.
751 case Primitive::kPrimFloat:
752 val.i = *reinterpret_cast<jint*>(GetParamAddress());
753 break;
754 case Primitive::kPrimVoid:
755 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -0700756 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -0700757 }
758 args_->push_back(val);
759}
760
761void BuildQuickArgumentVisitor::FixupReferences() {
762 // Fixup any references which may have changed.
763 for (const auto& pair : references_) {
764 pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -0700765 soa_->Env()->DeleteLocalRef(pair.first);
Ian Rogers9758f792014-03-13 09:02:55 -0700766 }
767}
768
Ian Rogers848871b2013-08-05 10:56:33 -0700769// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
770// which is responsible for recording callee save registers. We explicitly place into jobjects the
771// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
772// field within the proxy object, which will box the primitive arguments and deal with error cases.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700773extern "C" uint64_t artQuickProxyInvokeHandler(
774 ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
Mathieu Chartier90443472015-07-16 20:32:27 -0700775 SHARED_REQUIRES(Locks::mutator_lock_) {
Brian Carlstromd3633d52013-08-20 21:06:26 -0700776 DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
777 DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700778 // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
779 const char* old_cause =
780 self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
781 // Register the top of the managed stack, making stack crawlable.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700782 DCHECK_EQ((*sp), proxy_method) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700783 DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
Brian Carlstromd3633d52013-08-20 21:06:26 -0700784 Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
785 << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700786 self->VerifyStack();
787 // Start new JNI local reference state.
788 JNIEnvExt* env = self->GetJniEnv();
789 ScopedObjectAccessUnchecked soa(env);
790 ScopedJniEnvLocalRefState env_state(env);
791 // Create local ref. copies of proxy method and the receiver.
792 jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);
793
794 // Placing arguments into args vector and remove the receiver.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700795 ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(sizeof(void*));
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700796 CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
Andreas Gampec200a4a2014-06-16 18:39:09 -0700797 << PrettyMethod(non_proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700798 std::vector<jvalue> args;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700799 uint32_t shorty_len = 0;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700800 const char* shorty = non_proxy_method->GetShorty(&shorty_len);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700801 BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);
Brian Carlstromd3633d52013-08-20 21:06:26 -0700802
Ian Rogers848871b2013-08-05 10:56:33 -0700803 local_ref_visitor.VisitArguments();
Brian Carlstromd3633d52013-08-20 21:06:26 -0700804 DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700805 args.erase(args.begin());
806
807 // Convert proxy method into expected interface method.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700808 ArtMethod* interface_method = proxy_method->FindOverriddenMethod(sizeof(void*));
Ian Rogerse0a02da2014-12-02 14:10:53 -0800809 DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method);
Ian Rogers848871b2013-08-05 10:56:33 -0700810 DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
Mathieu Chartierfc58af42015-04-16 18:00:39 -0700811 self->EndAssertNoThreadSuspension(old_cause);
812 jobject interface_method_jobj = soa.AddLocalReference<jobject>(
813 mirror::Method::CreateFromArtMethod(soa.Self(), interface_method));
Ian Rogers848871b2013-08-05 10:56:33 -0700814
815 // All naked Object*s should now be in jobjects, so its safe to go into the main invoke code
816 // that performs allocations.
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700817 JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800818 // Restore references which might have moved.
819 local_ref_visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -0700820 return result.GetJ();
821}
822
823// Read object references held in arguments from quick frames and place in a JNI local references,
824// so they don't get garbage collected.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800825class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -0700826 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -0700827 RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
828 uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
Andreas Gampec200a4a2014-06-16 18:39:09 -0700829 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}
Ian Rogers848871b2013-08-05 10:56:33 -0700830
Mathieu Chartier90443472015-07-16 20:32:27 -0700831 void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;
Mathieu Chartier07d447b2013-09-26 11:57:43 -0700832
Mathieu Chartier90443472015-07-16 20:32:27 -0700833 void FixupReferences() SHARED_REQUIRES(Locks::mutator_lock_);
Ian Rogers848871b2013-08-05 10:56:33 -0700834
835 private:
Ian Rogers9758f792014-03-13 09:02:55 -0700836 ScopedObjectAccessUnchecked* const soa_;
Mathieu Chartier5275bcb2014-02-20 17:16:42 -0800837 // References which we must update when exiting in case the GC moved the objects.
Andreas Gampec200a4a2014-06-16 18:39:09 -0700838 std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
839
Mathieu Chartier590fee92013-09-13 13:46:47 -0700840 DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
Ian Rogers848871b2013-08-05 10:56:33 -0700841};
842
Ian Rogers9758f792014-03-13 09:02:55 -0700843void RememberForGcArgumentVisitor::Visit() {
844 if (IsParamAReference()) {
845 StackReference<mirror::Object>* stack_ref =
846 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
847 jobject reference =
848 soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
849 references_.push_back(std::make_pair(reference, stack_ref));
850 }
851}
852
853void RememberForGcArgumentVisitor::FixupReferences() {
854 // Fixup any references which may have changed.
855 for (const auto& pair : references_) {
856 pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -0700857 soa_->Env()->DeleteLocalRef(pair.first);
Ian Rogers9758f792014-03-13 09:02:55 -0700858 }
859}
860
Ian Rogers848871b2013-08-05 10:56:33 -0700861// Lazily resolve a method for quick. Called by stub code.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700862extern "C" const void* artQuickResolutionTrampoline(
863 ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
Mathieu Chartier90443472015-07-16 20:32:27 -0700864 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampe3b45ef22015-05-26 21:34:09 -0700865 // The resolution trampoline stashes the resolved method into the callee-save frame to transport
866 // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
867 // does not have the same stack layout as the callee-save method).
868 ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
Ian Rogers848871b2013-08-05 10:56:33 -0700869 // Start new JNI local reference state
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800870 JNIEnvExt* env = self->GetJniEnv();
Ian Rogers848871b2013-08-05 10:56:33 -0700871 ScopedObjectAccessUnchecked soa(env);
872 ScopedJniEnvLocalRefState env_state(env);
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800873 const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");
Ian Rogers848871b2013-08-05 10:56:33 -0700874
875 // Compute details about the called method (avoid GCs)
876 ClassLinker* linker = Runtime::Current()->GetClassLinker();
Ian Rogers848871b2013-08-05 10:56:33 -0700877 InvokeType invoke_type;
Ian Rogerse0a02da2014-12-02 14:10:53 -0800878 MethodReference called_method(nullptr, 0);
879 const bool called_method_known_on_entry = !called->IsRuntimeMethod();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700880 ArtMethod* caller = nullptr;
Ian Rogerse0a02da2014-12-02 14:10:53 -0800881 if (!called_method_known_on_entry) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +0100882 caller = QuickArgumentVisitor::GetCallingMethod(sp);
883 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
Ian Rogers848871b2013-08-05 10:56:33 -0700884 const DexFile::CodeItem* code;
Ian Rogerse0a02da2014-12-02 14:10:53 -0800885 called_method.dex_file = caller->GetDexFile();
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700886 code = caller->GetCodeItem();
Ian Rogers848871b2013-08-05 10:56:33 -0700887 CHECK_LT(dex_pc, code->insns_size_in_code_units_);
888 const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
889 Instruction::Code instr_code = instr->Opcode();
890 bool is_range;
891 switch (instr_code) {
892 case Instruction::INVOKE_DIRECT:
893 invoke_type = kDirect;
894 is_range = false;
895 break;
896 case Instruction::INVOKE_DIRECT_RANGE:
897 invoke_type = kDirect;
898 is_range = true;
899 break;
900 case Instruction::INVOKE_STATIC:
901 invoke_type = kStatic;
902 is_range = false;
903 break;
904 case Instruction::INVOKE_STATIC_RANGE:
905 invoke_type = kStatic;
906 is_range = true;
907 break;
908 case Instruction::INVOKE_SUPER:
909 invoke_type = kSuper;
910 is_range = false;
911 break;
912 case Instruction::INVOKE_SUPER_RANGE:
913 invoke_type = kSuper;
914 is_range = true;
915 break;
916 case Instruction::INVOKE_VIRTUAL:
917 invoke_type = kVirtual;
918 is_range = false;
919 break;
920 case Instruction::INVOKE_VIRTUAL_RANGE:
921 invoke_type = kVirtual;
922 is_range = true;
923 break;
924 case Instruction::INVOKE_INTERFACE:
925 invoke_type = kInterface;
926 is_range = false;
927 break;
928 case Instruction::INVOKE_INTERFACE_RANGE:
929 invoke_type = kInterface;
930 is_range = true;
931 break;
932 default:
Ian Rogerse0a02da2014-12-02 14:10:53 -0800933 LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
934 UNREACHABLE();
Ian Rogers848871b2013-08-05 10:56:33 -0700935 }
Ian Rogerse0a02da2014-12-02 14:10:53 -0800936 called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
Ian Rogers848871b2013-08-05 10:56:33 -0700937 } else {
938 invoke_type = kStatic;
Ian Rogerse0a02da2014-12-02 14:10:53 -0800939 called_method.dex_file = called->GetDexFile();
940 called_method.dex_method_index = called->GetDexMethodIndex();
Ian Rogers848871b2013-08-05 10:56:33 -0700941 }
942 uint32_t shorty_len;
943 const char* shorty =
Ian Rogerse0a02da2014-12-02 14:10:53 -0800944 called_method.dex_file->GetMethodShorty(
945 called_method.dex_file->GetMethodId(called_method.dex_method_index), &shorty_len);
Mathieu Chartier590fee92013-09-13 13:46:47 -0700946 RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
Ian Rogers848871b2013-08-05 10:56:33 -0700947 visitor.VisitArguments();
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800948 self->EndAssertNoThreadSuspension(old_cause);
Ian Rogerse0a02da2014-12-02 14:10:53 -0800949 const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
Ian Rogers848871b2013-08-05 10:56:33 -0700950 // Resolve method filling in dex cache.
Ian Rogerse0a02da2014-12-02 14:10:53 -0800951 if (!called_method_known_on_entry) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700952 StackHandleScope<1> hs(self);
Mathieu Chartier0cd81352014-05-22 16:48:55 -0700953 mirror::Object* dummy = nullptr;
954 HandleWrapper<mirror::Object> h_receiver(
955 hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
Ian Rogerse0a02da2014-12-02 14:10:53 -0800956 DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700957 called = linker->ResolveMethod(self, called_method.dex_method_index, caller, invoke_type);
Ian Rogers848871b2013-08-05 10:56:33 -0700958 }
Ian Rogerse0a02da2014-12-02 14:10:53 -0800959 const void* code = nullptr;
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800960 if (LIKELY(!self->IsExceptionPending())) {
Ian Rogers848871b2013-08-05 10:56:33 -0700961 // Incompatible class change should have been handled in resolve method.
Brian Carlstrom2ec65202014-03-03 15:16:37 -0800962 CHECK(!called->CheckIncompatibleClassChange(invoke_type))
963 << PrettyMethod(called) << " " << invoke_type;
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800964 if (virtual_or_interface) {
965 // Refine called method based on receiver.
966 CHECK(receiver != nullptr) << invoke_type;
Mingyao Yangf4867782014-05-05 11:55:02 -0700967
Mathieu Chartiere401d142015-04-22 13:56:20 -0700968 ArtMethod* orig_called = called;
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800969 if (invoke_type == kVirtual) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700970 called = receiver->GetClass()->FindVirtualMethodForVirtual(called, sizeof(void*));
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800971 } else {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700972 called = receiver->GetClass()->FindVirtualMethodForInterface(called, sizeof(void*));
Mathieu Chartier55871bf2014-02-27 10:24:50 -0800973 }
Mingyao Yangf4867782014-05-05 11:55:02 -0700974
975 CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
976 << PrettyTypeOf(receiver) << " "
977 << invoke_type << " " << orig_called->GetVtableIndex();
978
Ian Rogers83883d72013-10-21 21:07:24 -0700979 // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
Ian Rogerse0a02da2014-12-02 14:10:53 -0800980 // of the sharpened method, avoiding dirtying the dex cache if possible.
Ian Rogers00f15272014-12-02 16:55:46 -0800981 // Note: called_method.dex_method_index references the dex method before the
 982 // FindVirtualMethodFor... This is ok for FindDexMethodIndexInOtherDexFile, which only cares
983 // about the name and signature.
984 uint32_t update_dex_cache_method_index = called->GetDexMethodIndex();
Vladimir Marko05792b92015-08-03 11:56:49 +0100985 if (!called->HasSameDexCacheResolvedMethods(caller, sizeof(void*))) {
Ian Rogers83883d72013-10-21 21:07:24 -0700986 // Calling from one dex file to another, need to compute the method index appropriate to
Vladimir Markobbcc0c02014-02-03 14:08:42 +0000987 // the caller's dex file. Since we get here only if the original called was a runtime
988 // method, we've got the correct dex_file and a dex_method_idx from above.
Ian Rogerse0a02da2014-12-02 14:10:53 -0800989 DCHECK(!called_method_known_on_entry);
990 DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
991 const DexFile* caller_dex_file = called_method.dex_file;
992 uint32_t caller_method_name_and_sig_index = called_method.dex_method_index;
993 update_dex_cache_method_index =
994 called->FindDexMethodIndexInOtherDexFile(*caller_dex_file,
995 caller_method_name_and_sig_index);
996 }
997 if ((update_dex_cache_method_index != DexFile::kDexNoIndex) &&
Mathieu Chartiere401d142015-04-22 13:56:20 -0700998 (caller->GetDexCacheResolvedMethod(
999 update_dex_cache_method_index, sizeof(void*)) != called)) {
1000 caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called, sizeof(void*));
Ian Rogers83883d72013-10-21 21:07:24 -07001001 }
Mathieu Chartiere4a91bb2015-01-28 13:11:44 -08001002 } else if (invoke_type == kStatic) {
1003 const auto called_dex_method_idx = called->GetDexMethodIndex();
1004 // For static invokes, we may dispatch to the static method in the superclass but resolve
1005 // using the subclass. To prevent getting slow paths on each invoke, we force set the
1006 // resolved method for the super class dex method index if we are in the same dex file.
1007 // b/19175856
1008 if (called->GetDexFile() == called_method.dex_file &&
1009 called_method.dex_method_index != called_dex_method_idx) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001010 called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called, sizeof(void*));
Mathieu Chartiere4a91bb2015-01-28 13:11:44 -08001011 }
Ian Rogers83883d72013-10-21 21:07:24 -07001012 }
Daniel Mihalyieb076692014-08-22 17:33:31 +02001013
Ian Rogers848871b2013-08-05 10:56:33 -07001014 // Ensure that the called method's class is initialized.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001015 StackHandleScope<1> hs(soa.Self());
1016 Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
Ian Rogers7b078e82014-09-10 14:44:24 -07001017 linker->EnsureInitialized(soa.Self(), called_class, true, true);
Ian Rogers848871b2013-08-05 10:56:33 -07001018 if (LIKELY(called_class->IsInitialized())) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001019 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1020 // If we are single-stepping or the called method is deoptimized (by a
1021 // breakpoint, for example), then we have to execute the called method
1022 // with the interpreter.
1023 code = GetQuickToInterpreterBridge();
1024 } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
1025 // If the caller is deoptimized (by a breakpoint, for example), we have to
1026 // continue its execution with interpreter when returning from the called
1027 // method. Because we do not want to execute the called method with the
1028 // interpreter, we wrap its execution into the instrumentation stubs.
1029 // When the called method returns, it will execute the instrumentation
1030 // exit hook that will determine the need of the interpreter with a call
1031 // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
1032 // it is needed.
1033 code = GetQuickInstrumentationEntryPoint();
1034 } else {
1035 code = called->GetEntryPointFromQuickCompiledCode();
1036 }
Ian Rogers848871b2013-08-05 10:56:33 -07001037 } else if (called_class->IsInitializing()) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001038 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1039 // If we are single-stepping or the called method is deoptimized (by a
1040 // breakpoint, for example), then we have to execute the called method
1041 // with the interpreter.
1042 code = GetQuickToInterpreterBridge();
1043 } else if (invoke_type == kStatic) {
Ian Rogers848871b2013-08-05 10:56:33 -07001044 // Class is still initializing, go to oat and grab code (trampoline must be left in place
1045 // until class is initialized to stop races between threads).
Ian Rogersef7d42f2014-01-06 12:55:46 -08001046 code = linker->GetQuickOatCodeFor(called);
Ian Rogers848871b2013-08-05 10:56:33 -07001047 } else {
1048 // No trampoline for non-static methods.
Ian Rogersef7d42f2014-01-06 12:55:46 -08001049 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -07001050 }
1051 } else {
1052 DCHECK(called_class->IsErroneous());
1053 }
1054 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001055 CHECK_EQ(code == nullptr, self->IsExceptionPending());
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001056 // Fix up any locally saved objects that may have moved during a GC.
1057 visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -07001058 // Place called method in callee-save frame to be placed as first argument to quick method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001059 *sp = called;
1060
Ian Rogers848871b2013-08-05 10:56:33 -07001061 return code;
1062}
1063
Andreas Gampec147b002014-03-06 18:11:06 -08001064/*
1065 * This class uses a couple of observations to unite the different calling conventions through
1066 * a few constants.
1067 *
1068 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
1069 * possible alignment.
1070 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
1071 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
1072 * when we have to split things
1073 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
1074 * and we can use Int handling directly.
1075 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
1076 * necessary when widening. Also, widening of Ints will take place implicitly, and the
1077 * extension should be compatible with Aarch64, which mandates copying the available bits
1078 * into LSB and leaving the rest unspecified.
1079 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
1080 * the stack.
 1081 * 6) Only little-endian architectures are supported.
1082 *
1083 *
1084 * Actual work is supposed to be done in a delegate of the template type. The interface is as
1085 * follows:
1086 *
1087 * void PushGpr(uintptr_t): Add a value for the next GPR
1088 *
1089 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 1090 * padding, that is, when the architecture is 32b and aligns 64b values.
1091 *
1092 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
1093 * split this if necessary. The current state will have aligned, if
1094 * necessary.
1095 *
1096 * void PushStack(uintptr_t): Push a value to the stack.
1097 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001098 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001099 * called with nullptr, as this might be important for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -08001100 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001101 * entry in the HandleScope (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -08001102 *
1103 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001104template<class T> class BuildNativeCallFrameStateMachine {
Andreas Gampec147b002014-03-06 18:11:06 -08001105 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001106#if defined(__arm__)
1107 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -08001108 static constexpr bool kNativeSoftFloatAbi = true;
1109 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001110 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1111
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001112 static constexpr size_t kRegistersNeededForLong = 2;
1113 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001114 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001115 static constexpr bool kMultiFPRegistersWidened = false;
1116 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001117 static constexpr bool kAlignLongOnStack = true;
1118 static constexpr bool kAlignDoubleOnStack = true;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001119#elif defined(__aarch64__)
1120 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
 1121 static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs.
1122 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1123
1124 static constexpr size_t kRegistersNeededForLong = 1;
1125 static constexpr size_t kRegistersNeededForDouble = 1;
1126 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001127 static constexpr bool kMultiFPRegistersWidened = false;
1128 static constexpr bool kMultiGPRegistersWidened = false;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001129 static constexpr bool kAlignLongOnStack = false;
1130 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001131#elif defined(__mips__) && !defined(__LP64__)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001132 static constexpr bool kNativeSoftFloatAbi = true; // FP args are passed in GPRs (treated as soft-float here).
Douglas Leung735b8552014-10-31 12:21:40 -07001133 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs.
1134 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001135
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001136 static constexpr size_t kRegistersNeededForLong = 2;
1137 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001138 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001139 static constexpr bool kMultiFPRegistersWidened = true;
1140 static constexpr bool kMultiGPRegistersWidened = false;
Douglas Leung735b8552014-10-31 12:21:40 -07001141 static constexpr bool kAlignLongOnStack = true;
1142 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001143#elif defined(__mips__) && defined(__LP64__)
 1144 // Let the code prepare GPRs only; we will load the FPRs with the same data.
1145 static constexpr bool kNativeSoftFloatAbi = true;
1146 static constexpr size_t kNumNativeGprArgs = 8;
1147 static constexpr size_t kNumNativeFprArgs = 0;
1148
1149 static constexpr size_t kRegistersNeededForLong = 1;
1150 static constexpr size_t kRegistersNeededForDouble = 1;
1151 static constexpr bool kMultiRegistersAligned = false;
1152 static constexpr bool kMultiFPRegistersWidened = false;
1153 static constexpr bool kMultiGPRegistersWidened = true;
1154 static constexpr bool kAlignLongOnStack = false;
1155 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001156#elif defined(__i386__)
1157 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -08001158 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001159 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs.
 1160 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1161
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001162 static constexpr size_t kRegistersNeededForLong = 2;
1163 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001164 static constexpr bool kMultiRegistersAligned = false; // x86 not using regs, anyways
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001165 static constexpr bool kMultiFPRegistersWidened = false;
1166 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001167 static constexpr bool kAlignLongOnStack = false;
1168 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001169#elif defined(__x86_64__)
1170 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1171 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
1172 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1173
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001174 static constexpr size_t kRegistersNeededForLong = 1;
1175 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -08001176 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001177 static constexpr bool kMultiFPRegistersWidened = false;
1178 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001179 static constexpr bool kAlignLongOnStack = false;
1180 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001181#else
1182#error "Unsupported architecture"
1183#endif
1184
Andreas Gampec147b002014-03-06 18:11:06 -08001185 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001186 explicit BuildNativeCallFrameStateMachine(T* delegate)
1187 : gpr_index_(kNumNativeGprArgs),
1188 fpr_index_(kNumNativeFprArgs),
1189 stack_entries_(0),
1190 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -08001191 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
1192 // the next register is even; counting down is just to make the compiler happy...
Andreas Gampe575e78c2014-11-03 23:41:03 -08001193 static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
1194 static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
Andreas Gampec147b002014-03-06 18:11:06 -08001195 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001196
Andreas Gampec200a4a2014-06-16 18:39:09 -07001197 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001198
Ian Rogers1428dce2014-10-21 15:02:15 -07001199 bool HavePointerGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001200 return gpr_index_ > 0;
1201 }
1202
Andreas Gampec200a4a2014-06-16 18:39:09 -07001203 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -08001204 if (HavePointerGpr()) {
1205 gpr_index_--;
1206 PushGpr(reinterpret_cast<uintptr_t>(val));
1207 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001208 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -08001209 PushStack(reinterpret_cast<uintptr_t>(val));
1210 gpr_index_ = 0;
1211 }
1212 }
1213
Ian Rogers1428dce2014-10-21 15:02:15 -07001214 bool HaveHandleScopeGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001215 return gpr_index_ > 0;
1216 }
1217
Mathieu Chartier90443472015-07-16 20:32:27 -07001218 void AdvanceHandleScope(mirror::Object* ptr) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001219 uintptr_t handle = PushHandle(ptr);
1220 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -08001221 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001222 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001223 } else {
1224 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001225 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001226 gpr_index_ = 0;
1227 }
1228 }
1229
Ian Rogers1428dce2014-10-21 15:02:15 -07001230 bool HaveIntGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001231 return gpr_index_ > 0;
1232 }
1233
1234 void AdvanceInt(uint32_t val) {
1235 if (HaveIntGpr()) {
1236 gpr_index_--;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001237 if (kMultiGPRegistersWidened) {
1238 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001239 PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001240 } else {
1241 PushGpr(val);
1242 }
Andreas Gampec147b002014-03-06 18:11:06 -08001243 } else {
1244 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001245 if (kMultiGPRegistersWidened) {
1246 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001247 PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001248 } else {
1249 PushStack(val);
1250 }
Andreas Gampec147b002014-03-06 18:11:06 -08001251 gpr_index_ = 0;
1252 }
1253 }
1254
Ian Rogers1428dce2014-10-21 15:02:15 -07001255 bool HaveLongGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001256 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
1257 }
1258
Ian Rogers1428dce2014-10-21 15:02:15 -07001259 bool LongGprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001260 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1261 kAlignLongOnStack && // and when it needs alignment
1262 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1263 }
1264
Ian Rogers1428dce2014-10-21 15:02:15 -07001265 bool LongStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001266 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1267 kAlignLongOnStack && // and when it needs 8B alignment
1268 (stack_entries_ & 1) == 1; // counter is odd
1269 }
1270
1271 void AdvanceLong(uint64_t val) {
1272 if (HaveLongGpr()) {
1273 if (LongGprNeedsPadding()) {
1274 PushGpr(0);
1275 gpr_index_--;
1276 }
1277 if (kRegistersNeededForLong == 1) {
1278 PushGpr(static_cast<uintptr_t>(val));
1279 } else {
1280 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1281 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1282 }
1283 gpr_index_ -= kRegistersNeededForLong;
1284 } else {
1285 if (LongStackNeedsPadding()) {
1286 PushStack(0);
1287 stack_entries_++;
1288 }
1289 if (kRegistersNeededForLong == 1) {
1290 PushStack(static_cast<uintptr_t>(val));
1291 stack_entries_++;
1292 } else {
1293 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1294 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1295 stack_entries_ += 2;
1296 }
1297 gpr_index_ = 0;
1298 }
1299 }
1300
Ian Rogers1428dce2014-10-21 15:02:15 -07001301 bool HaveFloatFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001302 return fpr_index_ > 0;
1303 }
1304
Andreas Gampec147b002014-03-06 18:11:06 -08001305 void AdvanceFloat(float val) {
1306 if (kNativeSoftFloatAbi) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001307 AdvanceInt(bit_cast<uint32_t, float>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001308 } else {
1309 if (HaveFloatFpr()) {
1310 fpr_index_--;
1311 if (kRegistersNeededForDouble == 1) {
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001312 if (kMultiFPRegistersWidened) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001313 PushFpr8(bit_cast<uint64_t, double>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001314 } else {
1315 // No widening, just use the bits.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001316 PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001317 }
1318 } else {
1319 PushFpr4(val);
1320 }
1321 } else {
1322 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001323 if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
Andreas Gampec147b002014-03-06 18:11:06 -08001324 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001325 // Note: We need to jump through those hoops to make the compiler happy.
1326 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001327 PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001328 } else {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001329 PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001330 }
1331 fpr_index_ = 0;
1332 }
1333 }
1334 }
1335
Ian Rogers1428dce2014-10-21 15:02:15 -07001336 bool HaveDoubleFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001337 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1338 }
1339
Ian Rogers1428dce2014-10-21 15:02:15 -07001340 bool DoubleFprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001341 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1342 kAlignDoubleOnStack && // and when it needs alignment
1343 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1344 }
1345
Ian Rogers1428dce2014-10-21 15:02:15 -07001346 bool DoubleStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001347 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1348 kAlignDoubleOnStack && // and when it needs 8B alignment
1349 (stack_entries_ & 1) == 1; // counter is odd
1350 }
1351
1352 void AdvanceDouble(uint64_t val) {
1353 if (kNativeSoftFloatAbi) {
1354 AdvanceLong(val);
1355 } else {
1356 if (HaveDoubleFpr()) {
1357 if (DoubleFprNeedsPadding()) {
1358 PushFpr4(0);
1359 fpr_index_--;
1360 }
1361 PushFpr8(val);
1362 fpr_index_ -= kRegistersNeededForDouble;
1363 } else {
1364 if (DoubleStackNeedsPadding()) {
1365 PushStack(0);
1366 stack_entries_++;
1367 }
1368 if (kRegistersNeededForDouble == 1) {
1369 PushStack(static_cast<uintptr_t>(val));
1370 stack_entries_++;
1371 } else {
1372 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1373 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1374 stack_entries_ += 2;
1375 }
1376 fpr_index_ = 0;
1377 }
1378 }
1379 }
1380
Ian Rogers1428dce2014-10-21 15:02:15 -07001381 uint32_t GetStackEntries() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001382 return stack_entries_;
1383 }
1384
Ian Rogers1428dce2014-10-21 15:02:15 -07001385 uint32_t GetNumberOfUsedGprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001386 return kNumNativeGprArgs - gpr_index_;
1387 }
1388
Ian Rogers1428dce2014-10-21 15:02:15 -07001389 uint32_t GetNumberOfUsedFprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001390 return kNumNativeFprArgs - fpr_index_;
1391 }
1392
1393 private:
1394 void PushGpr(uintptr_t val) {
1395 delegate_->PushGpr(val);
1396 }
1397 void PushFpr4(float val) {
1398 delegate_->PushFpr4(val);
1399 }
1400 void PushFpr8(uint64_t val) {
1401 delegate_->PushFpr8(val);
1402 }
1403 void PushStack(uintptr_t val) {
1404 delegate_->PushStack(val);
1405 }
Mathieu Chartier90443472015-07-16 20:32:27 -07001406 uintptr_t PushHandle(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001407 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001408 }
1409
1410 uint32_t gpr_index_; // Number of free GPRs
1411 uint32_t fpr_index_; // Number of free FPRs
1412 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1413 // extended
Ian Rogers1428dce2014-10-21 15:02:15 -07001414 T* const delegate_; // What Push implementation gets called
Andreas Gampec147b002014-03-06 18:11:06 -08001415};
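
// Illustrative use of the state machine above (a sketch, not actual runtime code; CountingDelegate
// is the hypothetical delegate sketched before the class):
//
//   CountingDelegate counter;
//   BuildNativeCallFrameStateMachine<CountingDelegate> sm(&counter);
//   sm.AdvancePointer(nullptr);                          // e.g. the JNIEnv* slot
//   sm.AdvanceInt(42);                                   // an int argument
//   sm.AdvanceDouble(bit_cast<uint64_t, double>(1.0));   // a double argument
//   uint32_t spilled = sm.GetStackEntries();             // 32-bit slots that did not fit in registers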
1416
Andreas Gampec200a4a2014-06-16 18:39:09 -07001417// Computes the sizes of register stacks and call stack area. Handling of references can be extended
1418// in subclasses.
1419//
1420// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1421// them with handles.
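//
// Illustrative sketch (assumed usage, not an actual call site): for a method with shorty "LIJ"
// (returns an object, takes an int and a long), the out-of-register stack space needed for the
// native call could be computed as:
//
//   ComputeNativeCallFrameSize frame_size;
//   frame_size.Walk("LIJ", 3);                    // index 0 is the return type and is skipped
//   uint32_t out_bytes = frame_size.GetStackSize();
//
// Walk() feeds each argument character of the shorty into the state machine above; GetStackSize()
// then reports how many bytes of native stack arguments remain once registers are exhausted.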
1422class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001423 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001424 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1425
1426 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001427
Ian Rogers1428dce2014-10-21 15:02:15 -07001428 uint32_t GetStackSize() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001429 return num_stack_entries_ * sizeof(uintptr_t);
1430 }
1431
Ian Rogers1428dce2014-10-21 15:02:15 -07001432 uint8_t* LayoutCallStack(uint8_t* sp8) const {
Andreas Gampec147b002014-03-06 18:11:06 -08001433 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001434 // Align by kStackAlignment.
1435 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001436 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001437 }
1438
Ian Rogers1428dce2014-10-21 15:02:15 -07001439 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
1440 const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001441 // Assumption is OK right now, as we have soft-float arm
1442 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1443 sp8 -= fregs * sizeof(uintptr_t);
1444 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1445 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1446 sp8 -= iregs * sizeof(uintptr_t);
1447 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1448 return sp8;
1449 }
Andreas Gampec147b002014-03-06 18:11:06 -08001450
Andreas Gampec200a4a2014-06-16 18:39:09 -07001451 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
Ian Rogers1428dce2014-10-21 15:02:15 -07001452 uint32_t** start_fpr) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001453 // Native call stack.
1454 sp8 = LayoutCallStack(sp8);
1455 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001456
Andreas Gampec200a4a2014-06-16 18:39:09 -07001457 // Put fprs and gprs below.
1458 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001459
Andreas Gampec200a4a2014-06-16 18:39:09 -07001460 // Return the new bottom.
1461 return sp8;
1462 }
1463
1464 virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
Mathieu Chartier90443472015-07-16 20:32:27 -07001465 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001466 UNUSED(sm);
1467 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07001468
Mathieu Chartier90443472015-07-16 20:32:27 -07001469 void Walk(const char* shorty, uint32_t shorty_len) SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001470 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1471
1472 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001473
1474 for (uint32_t i = 1; i < shorty_len; ++i) {
1475 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1476 switch (cur_type_) {
1477 case Primitive::kPrimNot:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001478 // TODO: fix abuse of mirror types.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001479 sm.AdvanceHandleScope(
1480 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001481 break;
1482
1483 case Primitive::kPrimBoolean:
1484 case Primitive::kPrimByte:
1485 case Primitive::kPrimChar:
1486 case Primitive::kPrimShort:
1487 case Primitive::kPrimInt:
1488 sm.AdvanceInt(0);
1489 break;
1490 case Primitive::kPrimFloat:
1491 sm.AdvanceFloat(0);
1492 break;
1493 case Primitive::kPrimDouble:
1494 sm.AdvanceDouble(0);
1495 break;
1496 case Primitive::kPrimLong:
1497 sm.AdvanceLong(0);
1498 break;
1499 default:
1500 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001501 UNREACHABLE();
Andreas Gampec147b002014-03-06 18:11:06 -08001502 }
1503 }
1504
Ian Rogers1428dce2014-10-21 15:02:15 -07001505 num_stack_entries_ = sm.GetStackEntries();
Andreas Gampec147b002014-03-06 18:11:06 -08001506 }
1507
1508 void PushGpr(uintptr_t /* val */) {
1509 // not optimizing registers, yet
1510 }
1511
1512 void PushFpr4(float /* val */) {
1513 // not optimizing registers, yet
1514 }
1515
1516 void PushFpr8(uint64_t /* val */) {
1517 // not optimizing registers, yet
1518 }
1519
1520 void PushStack(uintptr_t /* val */) {
1521 // counting is already done in the superclass
1522 }
1523
Andreas Gampec200a4a2014-06-16 18:39:09 -07001524 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08001525 return reinterpret_cast<uintptr_t>(nullptr);
1526 }
1527
Andreas Gampec200a4a2014-06-16 18:39:09 -07001528 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08001529 uint32_t num_stack_entries_;
1530};
1531
Andreas Gampec200a4a2014-06-16 18:39:09 -07001532class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001533 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001534 ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001535
Andreas Gampec200a4a2014-06-16 18:39:09 -07001536 // Lays out the callee-save frame. Assumes that the not-yet-fixed-up frame corresponding to
 1537 // RefsAndArgs is at *m = sp. Updates *m to point to the bottom of the save frame.
 1538 //
 1539 // Note: assumes Walk() has been run before.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001540 void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Mathieu Chartier90443472015-07-16 20:32:27 -07001541 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001542 ArtMethod* method = **m;
1543
1544 DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001545
Andreas Gampec200a4a2014-06-16 18:39:09 -07001546 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
1547
1548 // First, fix up the layout of the callee-save frame.
1549 // We have to squeeze in the HandleScope, and relocate the method pointer.
1550
1551 // "Free" the slot for the method.
Ian Rogers13735952014-10-08 12:43:28 -07001552 sp8 += sizeof(void*); // In the callee-save frame we use a full pointer.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001553
 1554 // Under the callee saves, put the handle scope and the new method stack reference.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001555 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001556 size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001557
1558 sp8 -= scope_and_method;
1559 // Align by kStackAlignment.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001560 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001561
Mathieu Chartiere401d142015-04-22 13:56:20 -07001562 uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
Ian Rogers59c07062014-10-10 13:03:39 -07001563 *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
1564 num_handle_scope_references_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001565
1566 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1567 uint8_t* method_pointer = sp8;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001568 auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
1569 *new_method_ref = method;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001570 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001571 }
1572
Andreas Gampec200a4a2014-06-16 18:39:09 -07001573 // Adds space for the cookie. Note: may leave stack unaligned.
Ian Rogers1428dce2014-10-21 15:02:15 -07001574 void LayoutCookie(uint8_t** sp) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001575 // Reference cookie and padding
1576 *sp -= 8;
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001577 }
1578
Andreas Gampec200a4a2014-06-16 18:39:09 -07001579 // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
1580 // Returns the new bottom. Note: this may be unaligned.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001581 uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Mathieu Chartier90443472015-07-16 20:32:27 -07001582 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001583 // First, fix up the layout of the callee-save frame.
1584 // We have to squeeze in the HandleScope, and relocate the method pointer.
Ian Rogers59c07062014-10-10 13:03:39 -07001585 LayoutCalleeSaveFrame(self, m, sp, handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001586
1587 // The bottom of the callee-save frame is now where the method is, *m.
1588 uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);
1589
1590 // Add space for cookie.
1591 LayoutCookie(&sp8);
1592
1593 return sp8;
1594 }
1595
1596 // WARNING: After this, *sp won't be pointing to the method anymore!
Mathieu Chartiere401d142015-04-22 13:56:20 -07001597 uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
1598 HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
1599 uint32_t** start_fpr)
Mathieu Chartier90443472015-07-16 20:32:27 -07001600 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001601 Walk(shorty, shorty_len);
1602
1603 // JNI part.
Ian Rogers59c07062014-10-10 13:03:39 -07001604 uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001605
1606 sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);
1607
1608 // Return the new bottom.
1609 return sp8;
1610 }
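
  // Illustrative result of ComputeLayout() above (a sketch; exact padding and sizes are
  // architecture dependent, addresses decrease downwards):
  //
  //   | RefsAndArgs callee saves      |  (above the original sp)
  //   | HandleScope                   |  jclass/this plus reference arguments
  //   | ArtMethod* (relocated)        |  <- new *m, the new top of the quick frame
  //   | JNI cookie + padding          |  8 bytes, see LayoutCookie()
  //   | native stack arguments        |  <- *start_stack
  //   | FPR argument array            |  <- *start_fpr
  //   | GPR argument array            |  <- *start_gpr, the returned bottom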
1611
1612 uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;
1613
1614 // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
1615 void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
Mathieu Chartier90443472015-07-16 20:32:27 -07001616 SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001617
1618 private:
1619 uint32_t num_handle_scope_references_;
1620};
1621
1622uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
1623 num_handle_scope_references_++;
1624 return reinterpret_cast<uintptr_t>(nullptr);
1625}
1626
1627void ComputeGenericJniFrameSize::WalkHeader(
1628 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
1629 // JNIEnv
1630 sm->AdvancePointer(nullptr);
1631
1632 // Class object or this as first argument
1633 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
1634}
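
// Illustrative walk-through (not actual runtime code): for a static native method with shorty "II"
// (an int method taking one int argument), the walk records, in order:
//   1. AdvancePointer(...)      - the JNIEnv* slot (from WalkHeader),
//   2. AdvanceHandleScope(...)  - the jclass slot (from WalkHeader),
//   3. AdvanceInt(...)          - the single int argument (from Walk).
// PushHandle() is overridden above only to count references, so after the walk
// num_handle_scope_references_ is 1 and the handle scope is sized for a single entry.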
1635
1636// Class to push values to three separate regions. Used to fill the native call part. Adheres to
 1637// the template requirements of BuildNativeCallFrameStateMachine.
1638class FillNativeCall {
1639 public:
1640 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
1641 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
1642
1643 virtual ~FillNativeCall() {}
1644
1645 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
1646 cur_gpr_reg_ = gpr_regs;
1647 cur_fpr_reg_ = fpr_regs;
1648 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08001649 }
1650
1651 void PushGpr(uintptr_t val) {
1652 *cur_gpr_reg_ = val;
1653 cur_gpr_reg_++;
1654 }
1655
1656 void PushFpr4(float val) {
1657 *cur_fpr_reg_ = val;
1658 cur_fpr_reg_++;
1659 }
1660
1661 void PushFpr8(uint64_t val) {
1662 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1663 *tmp = val;
1664 cur_fpr_reg_ += 2;
1665 }
1666
1667 void PushStack(uintptr_t val) {
1668 *cur_stack_arg_ = val;
1669 cur_stack_arg_++;
1670 }
1671
Mathieu Chartier90443472015-07-16 20:32:27 -07001672 virtual uintptr_t PushHandle(mirror::Object*) SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001673 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001674 UNREACHABLE();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001675 }
1676
1677 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001678 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001679 uint32_t* cur_fpr_reg_;
1680 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001681};
Andreas Gampec147b002014-03-06 18:11:06 -08001682
Andreas Gampec200a4a2014-06-16 18:39:09 -07001683// Visits arguments on the stack placing them into a region lower down the stack for the benefit
1684// of transitioning into native code.
1685class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
1686 public:
Ian Rogers59c07062014-10-10 13:03:39 -07001687 BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001688 ArtMethod*** sp)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001689 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
1690 jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
1691 ComputeGenericJniFrameSize fsc;
1692 uintptr_t* start_gpr_reg;
1693 uint32_t* start_fpr_reg;
1694 uintptr_t* start_stack_arg;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001695 bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
Ian Rogers59c07062014-10-10 13:03:39 -07001696 &handle_scope_,
1697 &start_stack_arg,
Andreas Gampec200a4a2014-06-16 18:39:09 -07001698 &start_gpr_reg, &start_fpr_reg);
1699
Andreas Gampec200a4a2014-06-16 18:39:09 -07001700 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
1701
1702 // jni environment is always first argument
1703 sm_.AdvancePointer(self->GetJniEnv());
1704
1705 if (is_static) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001706 sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
Andreas Gampec200a4a2014-06-16 18:39:09 -07001707 }
1708 }
1709
Mathieu Chartier90443472015-07-16 20:32:27 -07001710 void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001711
Mathieu Chartier90443472015-07-16 20:32:27 -07001712 void FinalizeHandleScope(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001713
1714 StackReference<mirror::Object>* GetFirstHandleScopeEntry()
Mathieu Chartier90443472015-07-16 20:32:27 -07001715 SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001716 return handle_scope_->GetHandle(0).GetReference();
1717 }
1718
Mathieu Chartier90443472015-07-16 20:32:27 -07001719 jobject GetFirstHandleScopeJObject() const SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001720 return handle_scope_->GetHandle(0).ToJObject();
1721 }
1722
Ian Rogers1428dce2014-10-21 15:02:15 -07001723 void* GetBottomOfUsedArea() const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001724 return bottom_of_used_area_;
1725 }
1726
1727 private:
1728 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
1729 class FillJniCall FINAL : public FillNativeCall {
1730 public:
1731 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
1732 HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
1733 handle_scope_(handle_scope), cur_entry_(0) {}
1734
Mathieu Chartier90443472015-07-16 20:32:27 -07001735 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001736
1737 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
1738 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
1739 handle_scope_ = scope;
1740 cur_entry_ = 0U;
1741 }
1742
Mathieu Chartier90443472015-07-16 20:32:27 -07001743 void ResetRemainingScopeSlots() SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001744 // Initialize padding entries.
1745 size_t expected_slots = handle_scope_->NumberOfReferences();
1746 while (cur_entry_ < expected_slots) {
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001747 handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001748 }
1749 DCHECK_NE(cur_entry_, 0U);
1750 }
1751
1752 private:
1753 HandleScope* handle_scope_;
1754 size_t cur_entry_;
1755 };
1756
1757 HandleScope* handle_scope_;
1758 FillJniCall jni_call_;
1759 void* bottom_of_used_area_;
1760
1761 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001762
1763 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1764};
1765
Andreas Gampec200a4a2014-06-16 18:39:09 -07001766uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
1767 uintptr_t tmp;
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001768 MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001769 h.Assign(ref);
1770 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
1771 cur_entry_++;
1772 return tmp;
1773}
1774
Ian Rogers9758f792014-03-13 09:02:55 -07001775void BuildGenericJniFrameVisitor::Visit() {
1776 Primitive::Type type = GetParamPrimitiveType();
1777 switch (type) {
1778 case Primitive::kPrimLong: {
1779 jlong long_arg;
1780 if (IsSplitLongOrDouble()) {
1781 long_arg = ReadSplitLongParam();
1782 } else {
1783 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1784 }
1785 sm_.AdvanceLong(long_arg);
1786 break;
1787 }
1788 case Primitive::kPrimDouble: {
1789 uint64_t double_arg;
1790 if (IsSplitLongOrDouble()) {
 1791 // Read into a uint64_t so that we don't cast to a double.
1792 double_arg = ReadSplitLongParam();
1793 } else {
1794 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1795 }
1796 sm_.AdvanceDouble(double_arg);
1797 break;
1798 }
1799 case Primitive::kPrimNot: {
1800 StackReference<mirror::Object>* stack_ref =
1801 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001802 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07001803 break;
1804 }
1805 case Primitive::kPrimFloat:
1806 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1807 break;
1808 case Primitive::kPrimBoolean: // Fall-through.
1809 case Primitive::kPrimByte: // Fall-through.
1810 case Primitive::kPrimChar: // Fall-through.
1811 case Primitive::kPrimShort: // Fall-through.
1812 case Primitive::kPrimInt: // Fall-through.
1813 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1814 break;
1815 case Primitive::kPrimVoid:
1816 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001817 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -07001818 }
1819}
1820
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001821void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001822 // Clear out rest of the scope.
1823 jni_call_.ResetRemainingScopeSlots();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001824 // Install HandleScope.
1825 self->PushHandleScope(handle_scope_);
Ian Rogers9758f792014-03-13 09:02:55 -07001826}
1827
Ian Rogers04c31d22014-07-07 21:44:06 -07001828#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001829extern "C" void* artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001830#else
1831extern "C" void* artFindNativeMethod(Thread* self);
1832#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001833
Andreas Gampead615172014-04-04 16:20:13 -07001834uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
1835 if (lock != nullptr) {
1836 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
1837 } else {
1838 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
1839 }
1840}
1841
1842void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
1843 if (lock != nullptr) {
1844 JniMethodEndSynchronized(cookie, lock, self);
1845 } else {
1846 JniMethodEnd(cookie, self);
1847 }
1848}
1849
Andreas Gampec147b002014-03-06 18:11:06 -08001850/*
1851 * Initializes an alloca region assumed to be directly below sp for a native call:
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001852 * Creates a HandleScope and a call stack, and fills a mini stack with values to be pushed to registers.
Andreas Gampec147b002014-03-06 18:11:06 -08001853 * The final element on the stack is a pointer to the native code.
1854 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001855 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001856 * We need to fix this, as the handle scope needs to go into the callee-save frame.
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001857 *
Andreas Gampec147b002014-03-06 18:11:06 -08001858 * The return of this function denotes:
1859 * 1) How many bytes of the alloca can be released, if the value is non-negative.
1860 * 2) An error, if the value is negative.
1861 */
Mathieu Chartiere401d142015-04-22 13:56:20 -07001862extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
Mathieu Chartier90443472015-07-16 20:32:27 -07001863 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001864 ArtMethod* called = *sp;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001865 DCHECK(called->IsNative()) << PrettyMethod(called, true);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001866 uint32_t shorty_len = 0;
1867 const char* shorty = called->GetShorty(&shorty_len);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001868
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001869 // Run the visitor and update sp.
Ian Rogers59c07062014-10-10 13:03:39 -07001870 BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001871 visitor.VisitArguments();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001872 visitor.FinalizeHandleScope(self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001873
Andreas Gampec200a4a2014-06-16 18:39:09 -07001874 // Fix up managed-stack things in Thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001875 self->SetTopOfStack(sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001876
Ian Rogerse0dcd462014-03-08 15:21:04 -08001877 self->VerifyStack();
1878
Andreas Gampe90546832014-03-12 18:07:19 -07001879 // Start JNI, save the cookie.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001880 uint32_t cookie;
1881 if (called->IsSynchronized()) {
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001882 cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001883 if (self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001884 self->PopHandleScope();
Andreas Gampec147b002014-03-06 18:11:06 -08001885 // A negative value denotes an error.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001886 return GetTwoWordFailureValue();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001887 }
1888 } else {
1889 cookie = JniMethodStart(self);
1890 }
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001891 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Ian Rogerse0dcd462014-03-08 15:21:04 -08001892 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001893
Andreas Gampe90546832014-03-12 18:07:19 -07001894 // Retrieve the stored native code.
Mathieu Chartier2d721012014-11-10 11:08:06 -08001895 void* nativeCode = called->GetEntryPointFromJni();
Andreas Gampe90546832014-03-12 18:07:19 -07001896
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07001897 // There are two cases for the content of nativeCode:
1898 // 1) Pointer to the native function.
1899 // 2) Pointer to the trampoline for native code binding.
1900 // In the second case, we need to execute the binding and continue with the actual native function
1901 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07001902 DCHECK(nativeCode != nullptr);
1903 if (nativeCode == GetJniDlsymLookupStub()) {
Ian Rogers04c31d22014-07-07 21:44:06 -07001904#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001905 nativeCode = artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001906#else
1907 nativeCode = artFindNativeMethod(self);
1908#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001909
1910 if (nativeCode == nullptr) {
1911 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07001912
1913 // End JNI, as the assembly will move to deliver the exception.
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001914 jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001915 if (shorty[0] == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001916 artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
1917 } else {
1918 artQuickGenericJniEndJNINonRef(self, cookie, lock);
1919 }
1920
Andreas Gampec200a4a2014-06-16 18:39:09 -07001921 return GetTwoWordFailureValue();
Andreas Gampe90546832014-03-12 18:07:19 -07001922 }
1923 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001924 }
1925
Andreas Gampec200a4a2014-06-16 18:39:09 -07001926 // Return native code addr(lo) and bottom of alloca address(hi).
1927 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
1928 reinterpret_cast<uintptr_t>(nativeCode));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001929}
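
// A rough sketch of how this trampoline is used (illustrative only; the actual assembly stubs are
// architecture specific):
//   1. The assembly stub reserves an alloca region below sp and calls
//      artQuickGenericJniTrampoline(self, sp).
//   2. On success it receives the native code pointer (lo) and the bottom of the used area (hi),
//      loads the prepared GPR/FPR argument arrays, and calls the native code with the stack
//      arguments already laid out.
//   3. The native result is then handed to artQuickGenericJniEndTrampoline below, which unlocks,
//      pops the handle scope and converts the return value.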
1930
1931/*
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001932 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001933 * unlocking.
1934 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001935extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
Mathieu Chartier90443472015-07-16 20:32:27 -07001936 SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001937 ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001938 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001939 ArtMethod* called = *sp;
Ian Rogerse0dcd462014-03-08 15:21:04 -08001940 uint32_t cookie = *(sp32 - 1);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001941
Andreas Gampead615172014-04-04 16:20:13 -07001942 jobject lock = nullptr;
1943 if (called->IsSynchronized()) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001944 HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001945 + sizeof(*sp));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001946 lock = table->GetHandle(0).ToJObject();
Andreas Gampead615172014-04-04 16:20:13 -07001947 }
1948
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001949 char return_shorty_char = called->GetShorty()[0];
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001950
1951 if (return_shorty_char == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001952 return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001953 } else {
Andreas Gampead615172014-04-04 16:20:13 -07001954 artQuickGenericJniEndJNINonRef(self, cookie, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001955
1956 switch (return_shorty_char) {
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001957 case 'F': {
1958 if (kRuntimeISA == kX86) {
 1959 // Convert the result back to float.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001960 double d = bit_cast<double, uint64_t>(result_f);
1961 return bit_cast<uint32_t, float>(static_cast<float>(d));
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001962 } else {
1963 return result_f;
1964 }
1965 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001966 case 'D':
1967 return result_f;
1968 case 'Z':
1969 return result.z;
1970 case 'B':
1971 return result.b;
1972 case 'C':
1973 return result.c;
1974 case 'S':
1975 return result.s;
1976 case 'I':
1977 return result.i;
1978 case 'J':
1979 return result.j;
1980 case 'V':
1981 return 0;
1982 default:
1983 LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
1984 return 0;
1985 }
1986 }
Andreas Gampe2da88232014-02-27 12:26:20 -08001987}
1988
Andreas Gamped58342c2014-06-05 14:18:08 -07001989// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
1990// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07001991//
Andreas Gamped58342c2014-06-05 14:18:08 -07001992// It is valid to use this, as at the usage points here (returns from C functions) we assume that
Mathieu Chartier90443472015-07-16 20:32:27 -07001993// we hold the mutator lock (see SHARED_REQUIRES(Locks::mutator_lock_) annotations).
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001994
1995template<InvokeType type, bool access_check>
Mathieu Chartiere401d142015-04-22 13:56:20 -07001996static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, Thread* self,
1997 ArtMethod** sp) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001998 ScopedQuickEntrypointChecks sqec(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001999 DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
2000 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
2001 ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, type);
  if (UNLIKELY(method == nullptr)) {
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, &this_object, caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please the analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
  template SHARED_REQUIRES(Locks::mutator_lock_) \
  TwoWordReturn artInvokeCommon<type, access_check>( \
      uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S
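// Each *WithAccessCheck trampoline below is the C++ target of the corresponding assembly invoke
// stub; it simply forwards to artInvokeCommon for its invoke type with access checks enabled.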
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}

// Determine the target of an interface dispatch. The receiver (this_object) is known non-null.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(
    uint32_t dex_method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  // The optimizing compiler currently does not inline methods that have an interface
  // invocation. We use the outer method directly to avoid fetching a stack map, which is
  // more expensive.
  ArtMethod* caller_method = QuickArgumentVisitor::GetOuterMethod(sp);
  DCHECK_EQ(caller_method, QuickArgumentVisitor::GetCallingMethod(sp));
  ArtMethod* interface_method = caller_method->GetDexCacheResolvedMethod(
      dex_method_idx, sizeof(void*));
  DCHECK(interface_method != nullptr) << dex_method_idx << " " << PrettyMethod(caller_method);
  ArtMethod* method;
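  // If the caller's dex cache already holds the resolved interface method, dispatch directly
  // through the receiver's class; otherwise the cache still holds the resolution method and the
  // callee has to be resolved the slow way below.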
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(
        interface_method, sizeof(void*));
    if (UNLIKELY(method == nullptr)) {
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
          interface_method, this_object, caller_method);
      return GetTwoWordFailureValue();  // Failure.
    }
  } else {
    DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod());
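    // In debug builds, check that we really came from an invoke-interface call site and that its
    // method index matches the one the stub handed us.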
    if (kIsDebugBuild) {
      uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
      const DexFile::CodeItem* code = caller_method->GetCodeItem();
      CHECK_LT(dex_pc, code->insns_size_in_code_units_);
      const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
      Instruction::Code instr_code = instr->Opcode();
      CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
            instr_code == Instruction::INVOKE_INTERFACE_RANGE)
          << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
      if (instr_code == Instruction::INVOKE_INTERFACE) {
        CHECK_EQ(dex_method_idx, instr->VRegB_35c());
      } else {
        CHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
        CHECK_EQ(dex_method_idx, instr->VRegB_3rc());
      }
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
                                                   &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

}  // namespace art