/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "method_reference.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
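// Subclasses implement Visit(); VisitArguments() walks the method's shorty and, before each
// Visit() call, points GetParamAddress() at the register spill slot or caller stack slot holding
// the current argument. For instance, on arm64 an instance method with shorty "JID" (returns
// long, takes an int and a double) visits 'this' in X1, the int in X2, and the double in D0,
// while X0 holds the Method*.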
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
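  // Each architecture below configures argument handling with these flags:
  //   kSplitPairAcrossRegisterAndStack - may a 64-bit pair use the last GPR plus a stack slot?
  //   kAlignPairRegister               - must a 64-bit pair start at an even-numbered GPR?
  //   kQuickSoftFloatAbi               - do float/double arguments travel in GPRs?
  //   kQuickDoubleRegAlignedFloatBackFilled - may floats back-fill holes left by aligned doubles?
  //   kGprFprLockstep                  - do the GPR and FPR indices advance together (Mips64)?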
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        | 4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            | 4x2 bytes padding
  // | Method*    | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kAlignPairRegister = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSoftFloatAbi = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = !kArm32QuickCodeUseSoftFloat;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = kArm32QuickCodeUseSoftFloat ? 0 : 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            | padding
  // | Method*    | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        | callee saves
  // | A3         | arg3
  // | A2         | arg2
  // | A1         | arg1
  // | A0/Method* | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = true;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 16;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        | callee saves
  // | F7         | f_arg7
  // | F6         | f_arg6
  // | F5         | f_arg5
  // | F4         | f_arg4
  // | F3         | f_arg3
  // | F2         | f_arg2
  // | F1         | f_arg1
  // | F0         | f_arg0
  // | A7         | arg7
  // | A6         | arg6
  // | A5         | arg5
  // | A4         | arg4
  // | A3         | arg3
  // | A2         | arg2
  // | A1         | arg1
  // |            | padding
  // | A0/Method* | <- sp
  // NOTE: for Mips64, when A0 is skipped, F0 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  // GPR and FPR register assignments for Mips64 are interleaved: skipping a GPR also skips the
  // corresponding FPR, which is why kGprFprLockstep is set and the two indices advance together.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
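  // The x86-64 argument registers are not spilled in argument order: ascending from Gpr1Offset
  // the slots hold RCX, RDX, RBX, RBP, RSI, R8, R9 (see the layout above), so the mapping from
  // argument index to spill slot below is explicit rather than a simple multiply.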
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods, so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefsAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static mirror::Object* GetProxyThisObject(ArtMethod** sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, (*sp)->GetFrameSizeInBytes());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr();
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, Runtime::kRefsAndArgs);
  }

  static ArtMethod* GetOuterMethod(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

  static uint32_t GetCallingDexPc(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    ArtMethod* outer_method = *caller_sp;
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    uintptr_t outer_pc_offset = outer_method->NativeQuickPcOffset(outer_pc);

    if (outer_method->IsOptimized(sizeof(void*))) {
      CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset);
      DCHECK(stack_map.IsValid());
      if (stack_map.HasInlineInfo(code_info)) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
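        // The call site lives in the innermost inlined method, so report the dex pc at the
        // deepest inline depth.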
        return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
      } else {
        return stack_map.GetDexPc(code_info);
      }
    } else {
      return outer_method->ToDexPc(outer_pc);
    }
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, fpr_double_index_ must always land on an even register; that only
    // holds if the number of FPR argument registers is itself even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPRs and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Two invariants hold throughout the walk:
    // (a) 'stack_args_' points to the first method argument's out slot, and
    // (b) whatever the argument type, 'stack_index_' advances for every argument visited.
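    // E.g., with 3 GPR argument registers, a fourth integer argument is read from
    // stack_args_ + 3 * kBytesStackArgLocation: the first three arguments came from registers but
    // still advanced stack_index_ past their reserved out slots.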
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong && kAlignPairRegister && gpr_index_ == 0) {
              // Currently, this is only for ARM, where the first available parameter register
              // is R1. So we skip it, and use R2 instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;  // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;  // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled: the next available double register,
  // indexed in terms of singles; it may be behind fpr_index_.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// allows using the QuickArgumentVisitor constants without moving all the code into its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObject(sp);
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
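      // Longs and doubles span two vregs: this increment plus the shared one after the switch.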
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

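// Entry into the interpreter from quick code: builds a shadow frame from the quick arguments,
// runs the method in the interpreter, and returns the result packed into a uint64_t.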
extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    // No last shadow coming from quick.
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    auto* non_proxy_method = method->GetInterfaceMethodIfProxy(sizeof(void*));
    const char* shorty = non_proxy_method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(shadow_frame->GetMethod());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }
    JValue result = interpreter::EnterInterpreterFromEntryPoint(self, code_item, shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    // Request a stack deoptimization if needed.
    ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
    if (UNLIKELY(Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
      self->SetException(Thread::GetDeoptimizationException());
      self->SetDeoptimizationReturnValue(result);
    }

    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place the incoming
// reference arguments into jobjects (so they survive GC). We invoke the invocation handler, which
// is a field within the proxy object, which will box the primitive arguments and deal with error
// cases.
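// For example, calling foo.compareTo(bar) on a proxy instance foo lands here with proxy_method set
// to the proxy class's compareTo() and receiver set to foo; bar is converted to a jobject argument
// and the proxy's InvocationHandler ultimately runs.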
extern "C" uint64_t artQuickProxyInvokeHandler(
    ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ((*sp), proxy_method) << PrettyMethod(proxy_method);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place arguments into the args vector and remove the receiver.
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(sizeof(void*));
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  ArtMethod* interface_method = proxy_method->FindOverriddenMethod(sizeof(void*));
  DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  self->EndAssertNoThreadSuspension(old_cause);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(
      mirror::Method::CreateFromArtMethod(soa.Self(), interface_method));

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
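// This matters because resolution below may suspend for GC; a moving collector can relocate the
// argument objects, and FixupReferences() writes the updated addresses back into the quick frame
// before we return to managed code.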
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
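// The flow: decode the invoke instruction at the caller's dex pc to recover the invoke type and
// method index, resolve the method (refining it by the receiver's class for virtual/interface
// calls), ensure the declaring class is initialized, and hand back the code pointer to invoke.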
extern "C" const void* artQuickResolutionTrampoline(
    ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // The resolution trampoline stashes the resolved method into the callee-save frame to transport
  // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
  // does not have the same stack layout as the callee-save method).
  ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  InvokeType invoke_type;
  MethodReference called_method(nullptr, 0);
  const bool called_method_known_on_entry = !called->IsRuntimeMethod();
  ArtMethod* caller = nullptr;
  if (!called_method_known_on_entry) {
    caller = QuickArgumentVisitor::GetCallingMethod(sp);
    uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
    const DexFile::CodeItem* code;
    called_method.dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
        UNREACHABLE();
    }
    called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    called_method.dex_file = called->GetDexFile();
    called_method.dex_method_index = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      called_method.dex_file->GetMethodShorty(
          called_method.dex_file->GetMethodId(called_method.dex_method_index), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (!called_method_known_on_entry) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
    called = linker->ResolveMethod(self, called_method.dex_method_index, caller, invoke_type);
  }
  const void* code = nullptr;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;

      ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called, sizeof(void*));
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called, sizeof(void*));
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method, avoiding dirtying the dex cache if possible.
      // Note, called_method.dex_method_index references the dex method before the
      // FindVirtualMethodFor... This is ok for FindDexMethodIndexInOtherDexFile that only cares
      // about the name and signature.
      uint32_t update_dex_cache_method_index = called->GetDexMethodIndex();
      if (!called->HasSameDexCacheResolvedMethods(caller)) {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK(!called_method_known_on_entry);
        DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
        const DexFile* caller_dex_file = called_method.dex_file;
        uint32_t caller_method_name_and_sig_index = called_method.dex_method_index;
        update_dex_cache_method_index =
            called->FindDexMethodIndexInOtherDexFile(*caller_dex_file,
                                                     caller_method_name_and_sig_index);
      }
      if ((update_dex_cache_method_index != DexFile::kDexNoIndex) &&
          (caller->GetDexCacheResolvedMethod(
              update_dex_cache_method_index, sizeof(void*)) != called)) {
        caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called, sizeof(void*));
      }
    } else if (invoke_type == kStatic) {
      const auto called_dex_method_idx = called->GetDexMethodIndex();
      // For static invokes, we may dispatch to the static method in the superclass but resolve
      // using the subclass. To prevent getting slow paths on each invoke, we force set the
      // resolved method for the super class dex method index if we are in the same dex file.
      // b/19175856
      if (called->GetDexFile() == called_method.dex_file &&
          called_method.dex_method_index != called_dex_method_idx) {
        called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called, sizeof(void*));
      }
    }

    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(soa.Self(), called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
        // If we are single-stepping or the called method is deoptimized (by a
        // breakpoint, for example), then we have to execute the called method
        // with the interpreter.
        code = GetQuickToInterpreterBridge();
      } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
        // If the caller is deoptimized (by a breakpoint, for example), we have to
        // continue its execution with interpreter when returning from the called
        // method. Because we do not want to execute the called method with the
        // interpreter, we wrap its execution into the instrumentation stubs.
        // When the called method returns, it will execute the instrumentation
        // exit hook that will determine the need of the interpreter with a call
        // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
        // it is needed.
        code = GetQuickInstrumentationEntryPoint();
      } else {
        code = called->GetEntryPointFromQuickCompiledCode();
1016 }
Ian Rogers848871b2013-08-05 10:56:33 -07001017 } else if (called_class->IsInitializing()) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001018 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1019 // If we are single-stepping or the called method is deoptimized (by a
1020 // breakpoint, for example), then we have to execute the called method
1021 // with the interpreter.
1022 code = GetQuickToInterpreterBridge();
1023 } else if (invoke_type == kStatic) {
Ian Rogers848871b2013-08-05 10:56:33 -07001024 // Class is still initializing, go to oat and grab code (trampoline must be left in place
1025 // until class is initialized to stop races between threads).
Ian Rogersef7d42f2014-01-06 12:55:46 -08001026 code = linker->GetQuickOatCodeFor(called);
Ian Rogers848871b2013-08-05 10:56:33 -07001027 } else {
1028 // No trampoline for non-static methods.
Ian Rogersef7d42f2014-01-06 12:55:46 -08001029 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -07001030 }
1031 } else {
1032 DCHECK(called_class->IsErroneous());
1033 }
1034 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001035 CHECK_EQ(code == nullptr, self->IsExceptionPending());
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001036 // Fix up any locally saved objects that may have moved during a GC.
1037 visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -07001038 // Place the called method in the callee-save frame, where it becomes the first argument to the quick method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001039 *sp = called;
1040
Ian Rogers848871b2013-08-05 10:56:33 -07001041 return code;
1042}
1043
Andreas Gampec147b002014-03-06 18:11:06 -08001044/*
1045 * This class uses a couple of observations to unite the different calling conventions through
1046 * a few constants.
1047 *
1048 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
1049 * possible alignment.
1050 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
1051 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
1052 * when we have to split things.
1053 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
1054 * and we can use Int handling directly.
1055 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
1056 * necessary when widening. Also, widening of Ints will take place implicitly, and the
1057 * extension should be compatible with Aarch64, which mandates copying the available bits
1058 * into LSB and leaving the rest unspecified.
1059 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
1060 * the stack.
1061 * 6) Only little-endian architectures are supported.
1062 *
1063 *
1064 * Actual work is supposed to be done in a delegate of the template type. The interface is as
1065 * follows:
1066 *
1067 * void PushGpr(uintptr_t): Add a value for the next GPR
1068 *
1069 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
1070 * padding, that is, think the architecture is 32b and aligns 64b.
1071 *
1072 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
1073 * split this if necessary. The current state will have aligned, if
1074 * necessary.
1075 *
1076 * void PushStack(uintptr_t): Push a value to the stack.
1077 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001078 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001079 * called with nullptr, as this might be important for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -08001080 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001081 * entry in the HandleScope (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -08001082 *
1083 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001084template<class T> class BuildNativeCallFrameStateMachine {
Andreas Gampec147b002014-03-06 18:11:06 -08001085 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001086#if defined(__arm__)
1087 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -08001088 static constexpr bool kNativeSoftFloatAbi = true;
1089 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001090 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1091
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001092 static constexpr size_t kRegistersNeededForLong = 2;
1093 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001094 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001095 static constexpr bool kMultiFPRegistersWidened = false;
1096 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001097 static constexpr bool kAlignLongOnStack = true;
1098 static constexpr bool kAlignDoubleOnStack = true;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001099#elif defined(__aarch64__)
1100 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1101 static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs.
1102 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1103
1104 static constexpr size_t kRegistersNeededForLong = 1;
1105 static constexpr size_t kRegistersNeededForDouble = 1;
1106 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001107 static constexpr bool kMultiFPRegistersWidened = false;
1108 static constexpr bool kMultiGPRegistersWidened = false;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001109 static constexpr bool kAlignLongOnStack = false;
1110 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001111#elif defined(__mips__) && !defined(__LP64__)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001112 static constexpr bool kNativeSoftFloatAbi = true; // FP args are passed in GPRs here (soft-float convention).
Douglas Leung735b8552014-10-31 12:21:40 -07001113 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs.
1114 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001115
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001116 static constexpr size_t kRegistersNeededForLong = 2;
1117 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001118 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001119 static constexpr bool kMultiFPRegistersWidened = true;
1120 static constexpr bool kMultiGPRegistersWidened = false;
Douglas Leung735b8552014-10-31 12:21:40 -07001121 static constexpr bool kAlignLongOnStack = true;
1122 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001123#elif defined(__mips__) && defined(__LP64__)
1124 // Let the code prepare GPRs only and we will load the FPRs with same data.
1125 static constexpr bool kNativeSoftFloatAbi = true;
1126 static constexpr size_t kNumNativeGprArgs = 8;
1127 static constexpr size_t kNumNativeFprArgs = 0;
1128
1129 static constexpr size_t kRegistersNeededForLong = 1;
1130 static constexpr size_t kRegistersNeededForDouble = 1;
1131 static constexpr bool kMultiRegistersAligned = false;
1132 static constexpr bool kMultiFPRegistersWidened = false;
1133 static constexpr bool kMultiGPRegistersWidened = true;
1134 static constexpr bool kAlignLongOnStack = false;
1135 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001136#elif defined(__i386__)
1137 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -08001138 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001139 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs.
1140 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1141
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001142 static constexpr size_t kRegistersNeededForLong = 2;
1143 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001144 static constexpr bool kMultiRegistersAligned = false; // x86 not using regs, anyways
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001145 static constexpr bool kMultiFPRegistersWidened = false;
1146 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001147 static constexpr bool kAlignLongOnStack = false;
1148 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001149#elif defined(__x86_64__)
1150 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1151 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
1152 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1153
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001154 static constexpr size_t kRegistersNeededForLong = 1;
1155 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -08001156 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001157 static constexpr bool kMultiFPRegistersWidened = false;
1158 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001159 static constexpr bool kAlignLongOnStack = false;
1160 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001161#else
1162#error "Unsupported architecture"
1163#endif
1164
Andreas Gampec147b002014-03-06 18:11:06 -08001165 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001166 explicit BuildNativeCallFrameStateMachine(T* delegate)
1167 : gpr_index_(kNumNativeGprArgs),
1168 fpr_index_(kNumNativeFprArgs),
1169 stack_entries_(0),
1170 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -08001171 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
1172 // the next register is even; counting down is just to make the compiler happy...
Andreas Gampe575e78c2014-11-03 23:41:03 -08001173 static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
1174 static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
Andreas Gampec147b002014-03-06 18:11:06 -08001175 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001176
Andreas Gampec200a4a2014-06-16 18:39:09 -07001177 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001178
Ian Rogers1428dce2014-10-21 15:02:15 -07001179 bool HavePointerGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001180 return gpr_index_ > 0;
1181 }
1182
Andreas Gampec200a4a2014-06-16 18:39:09 -07001183 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -08001184 if (HavePointerGpr()) {
1185 gpr_index_--;
1186 PushGpr(reinterpret_cast<uintptr_t>(val));
1187 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001188 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -08001189 PushStack(reinterpret_cast<uintptr_t>(val));
1190 gpr_index_ = 0;
1191 }
1192 }
1193
Ian Rogers1428dce2014-10-21 15:02:15 -07001194 bool HaveHandleScopeGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001195 return gpr_index_ > 0;
1196 }
1197
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001198 void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1199 uintptr_t handle = PushHandle(ptr);
1200 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -08001201 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001202 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001203 } else {
1204 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001205 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001206 gpr_index_ = 0;
1207 }
1208 }
1209
Ian Rogers1428dce2014-10-21 15:02:15 -07001210 bool HaveIntGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001211 return gpr_index_ > 0;
1212 }
1213
1214 void AdvanceInt(uint32_t val) {
1215 if (HaveIntGpr()) {
1216 gpr_index_--;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001217 if (kMultiGPRegistersWidened) {
1218 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001219 PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001220 } else {
1221 PushGpr(val);
1222 }
Andreas Gampec147b002014-03-06 18:11:06 -08001223 } else {
1224 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001225 if (kMultiGPRegistersWidened) {
1226 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001227 PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001228 } else {
1229 PushStack(val);
1230 }
Andreas Gampec147b002014-03-06 18:11:06 -08001231 gpr_index_ = 0;
1232 }
1233 }
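  // Example (illustrative): on a target with kMultiGPRegistersWidened (MIPS64 above),
  // AdvanceInt(0x80000000u) sign-extends and pushes 0xFFFFFFFF80000000, matching the
  // int widening described in observation 4 above.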
1234
Ian Rogers1428dce2014-10-21 15:02:15 -07001235 bool HaveLongGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001236 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
1237 }
1238
Ian Rogers1428dce2014-10-21 15:02:15 -07001239 bool LongGprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001240 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1241 kAlignLongOnStack && // and when it needs alignment
1242 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1243 }
1244
Ian Rogers1428dce2014-10-21 15:02:15 -07001245 bool LongStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001246 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1247 kAlignLongOnStack && // and when it needs 8B alignment
1248 (stack_entries_ & 1) == 1; // counter is odd
1249 }
1250
1251 void AdvanceLong(uint64_t val) {
1252 if (HaveLongGpr()) {
1253 if (LongGprNeedsPadding()) {
1254 PushGpr(0);
1255 gpr_index_--;
1256 }
1257 if (kRegistersNeededForLong == 1) {
1258 PushGpr(static_cast<uintptr_t>(val));
1259 } else {
1260 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1261 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1262 }
1263 gpr_index_ -= kRegistersNeededForLong;
1264 } else {
1265 if (LongStackNeedsPadding()) {
1266 PushStack(0);
1267 stack_entries_++;
1268 }
1269 if (kRegistersNeededForLong == 1) {
1270 PushStack(static_cast<uintptr_t>(val));
1271 stack_entries_++;
1272 } else {
1273 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1274 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1275 stack_entries_ += 2;
1276 }
1277 gpr_index_ = 0;
1278 }
1279 }
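  // Example (illustrative): with kRegistersNeededForLong == 2, AdvanceLong(0x0000000123456789)
  // pushes 0x23456789 first and 0x00000001 second, low word in the lower slot, consistent
  // with observation 6 (little endian only).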
1280
Ian Rogers1428dce2014-10-21 15:02:15 -07001281 bool HaveFloatFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001282 return fpr_index_ > 0;
1283 }
1284
Andreas Gampec147b002014-03-06 18:11:06 -08001285 void AdvanceFloat(float val) {
1286 if (kNativeSoftFloatAbi) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001287 AdvanceInt(bit_cast<uint32_t, float>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001288 } else {
1289 if (HaveFloatFpr()) {
1290 fpr_index_--;
1291 if (kRegistersNeededForDouble == 1) {
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001292 if (kMultiFPRegistersWidened) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001293 PushFpr8(bit_cast<uint64_t, double>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001294 } else {
1295 // No widening, just use the bits.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001296 PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001297 }
1298 } else {
1299 PushFpr4(val);
1300 }
1301 } else {
1302 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001303 if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
Andreas Gampec147b002014-03-06 18:11:06 -08001304 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001305 // Note: We need to jump through those hoops to make the compiler happy.
1306 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001307 PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001308 } else {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001309 PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001310 }
1311 fpr_index_ = 0;
1312 }
1313 }
1314 }
1315
Ian Rogers1428dce2014-10-21 15:02:15 -07001316 bool HaveDoubleFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001317 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1318 }
1319
Ian Rogers1428dce2014-10-21 15:02:15 -07001320 bool DoubleFprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001321 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1322 kAlignDoubleOnStack && // and when it needs alignment
1323 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1324 }
1325
Ian Rogers1428dce2014-10-21 15:02:15 -07001326 bool DoubleStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001327 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1328 kAlignDoubleOnStack && // and when it needs 8B alignment
1329 (stack_entries_ & 1) == 1; // counter is odd
1330 }
1331
1332 void AdvanceDouble(uint64_t val) {
1333 if (kNativeSoftFloatAbi) {
1334 AdvanceLong(val);
1335 } else {
1336 if (HaveDoubleFpr()) {
1337 if (DoubleFprNeedsPadding()) {
1338 PushFpr4(0);
1339 fpr_index_--;
1340 }
1341 PushFpr8(val);
1342 fpr_index_ -= kRegistersNeededForDouble;
1343 } else {
1344 if (DoubleStackNeedsPadding()) {
1345 PushStack(0);
1346 stack_entries_++;
1347 }
1348 if (kRegistersNeededForDouble == 1) {
1349 PushStack(static_cast<uintptr_t>(val));
1350 stack_entries_++;
1351 } else {
1352 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1353 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1354 stack_entries_ += 2;
1355 }
1356 fpr_index_ = 0;
1357 }
1358 }
1359 }
1360
Ian Rogers1428dce2014-10-21 15:02:15 -07001361 uint32_t GetStackEntries() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001362 return stack_entries_;
1363 }
1364
Ian Rogers1428dce2014-10-21 15:02:15 -07001365 uint32_t GetNumberOfUsedGprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001366 return kNumNativeGprArgs - gpr_index_;
1367 }
1368
Ian Rogers1428dce2014-10-21 15:02:15 -07001369 uint32_t GetNumberOfUsedFprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001370 return kNumNativeFprArgs - fpr_index_;
1371 }
1372
1373 private:
1374 void PushGpr(uintptr_t val) {
1375 delegate_->PushGpr(val);
1376 }
1377 void PushFpr4(float val) {
1378 delegate_->PushFpr4(val);
1379 }
1380 void PushFpr8(uint64_t val) {
1381 delegate_->PushFpr8(val);
1382 }
1383 void PushStack(uintptr_t val) {
1384 delegate_->PushStack(val);
1385 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001386 uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1387 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001388 }
1389
1390 uint32_t gpr_index_; // Number of free GPRs
1391 uint32_t fpr_index_; // Number of free FPRs
1392 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1393 // extended
Ian Rogers1428dce2014-10-21 15:02:15 -07001394 T* const delegate_; // What Push implementation gets called
Andreas Gampec147b002014-03-06 18:11:06 -08001395};
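// A minimal sketch (not part of the original source) of a delegate satisfying the template
// contract above; it merely tallies pushes. The class name and members are hypothetical.
class TallyingNativeCallDelegate {
 public:
  void PushGpr(uintptr_t /* val */) { gprs_++; }
  void PushFpr4(float /* val */) { fprs_++; }
  void PushFpr8(uint64_t /* val */) { fprs_++; }
  void PushStack(uintptr_t /* val */) { stack_++; }
  uintptr_t PushHandle(mirror::Object* /* ref */) {
    return reinterpret_cast<uintptr_t>(nullptr);  // A nullptr "jobject" is permitted.
  }

  uint32_t gprs_ = 0;
  uint32_t fprs_ = 0;
  uint32_t stack_ = 0;
};
// Usage would mirror ComputeNativeCallFrameSize::Walk below:
//   TallyingNativeCallDelegate tally;
//   BuildNativeCallFrameStateMachine<TallyingNativeCallDelegate> sm(&tally);
//   sm.AdvanceInt(0); sm.AdvanceDouble(0);  // etc., per the method's shorty.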
1396
Andreas Gampec200a4a2014-06-16 18:39:09 -07001397// Computes the sizes of register stacks and call stack area. Handling of references can be extended
1398// in subclasses.
1399//
1400// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1401// them with handles.
1402class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001403 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001404 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1405
1406 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001407
Ian Rogers1428dce2014-10-21 15:02:15 -07001408 uint32_t GetStackSize() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001409 return num_stack_entries_ * sizeof(uintptr_t);
1410 }
1411
Ian Rogers1428dce2014-10-21 15:02:15 -07001412 uint8_t* LayoutCallStack(uint8_t* sp8) const {
Andreas Gampec147b002014-03-06 18:11:06 -08001413 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001414 // Align by kStackAlignment.
1415 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001416 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001417 }
1418
Ian Rogers1428dce2014-10-21 15:02:15 -07001419 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
1420 const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001421 // The pointer-sized-FPR-slot assumption below is OK right now, as the 32-bit targets here are soft-float.
1422 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1423 sp8 -= fregs * sizeof(uintptr_t);
1424 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1425 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1426 sp8 -= iregs * sizeof(uintptr_t);
1427 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1428 return sp8;
1429 }
Andreas Gampec147b002014-03-06 18:11:06 -08001430
Andreas Gampec200a4a2014-06-16 18:39:09 -07001431 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
Ian Rogers1428dce2014-10-21 15:02:15 -07001432 uint32_t** start_fpr) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001433 // Native call stack.
1434 sp8 = LayoutCallStack(sp8);
1435 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001436
Andreas Gampec200a4a2014-06-16 18:39:09 -07001437 // Put fprs and gprs below.
1438 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001439
Andreas Gampec200a4a2014-06-16 18:39:09 -07001440 // Return the new bottom.
1441 return sp8;
1442 }
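  // Resulting layout (sketch derived from the code above), higher addresses first:
  //                   +---------------------+  <- incoming sp8
  //                   |  native stack args  |  GetStackSize() bytes, bottom kStackAlignment-aligned
  //   *start_stack -> +---------------------+
  //                   |  FPR scratch slots  |  kNumNativeFprArgs pointer-sized slots
  //   *start_fpr ->   +---------------------+
  //                   |  GPR scratch slots  |  kNumNativeGprArgs pointer-sized slots
  //   *start_gpr ->   +---------------------+  <- returned as the new bottom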
1443
1444 virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001445 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1446 UNUSED(sm);
1447 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07001448
1449 void Walk(const char* shorty, uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1450 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1451
1452 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001453
1454 for (uint32_t i = 1; i < shorty_len; ++i) {
1455 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1456 switch (cur_type_) {
1457 case Primitive::kPrimNot:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001458 // TODO: fix abuse of mirror types.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001459 sm.AdvanceHandleScope(
1460 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001461 break;
1462
1463 case Primitive::kPrimBoolean:
1464 case Primitive::kPrimByte:
1465 case Primitive::kPrimChar:
1466 case Primitive::kPrimShort:
1467 case Primitive::kPrimInt:
1468 sm.AdvanceInt(0);
1469 break;
1470 case Primitive::kPrimFloat:
1471 sm.AdvanceFloat(0);
1472 break;
1473 case Primitive::kPrimDouble:
1474 sm.AdvanceDouble(0);
1475 break;
1476 case Primitive::kPrimLong:
1477 sm.AdvanceLong(0);
1478 break;
1479 default:
1480 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001481 UNREACHABLE();
Andreas Gampec147b002014-03-06 18:11:06 -08001482 }
1483 }
1484
Ian Rogers1428dce2014-10-21 15:02:15 -07001485 num_stack_entries_ = sm.GetStackEntries();
Andreas Gampec147b002014-03-06 18:11:06 -08001486 }
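  // Hypothetical usage (not in the original source): size the out-args area for a native
  // method with shorty "ILJD" (int return; reference, long and double arguments):
  //   ComputeNativeCallFrameSize fsc;
  //   fsc.Walk("ILJD", 4);                       // Index 0 is the return type; it is skipped.
  //   uint32_t out_bytes = fsc.GetStackSize();   // Bytes needed beyond the argument registers.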
1487
1488 void PushGpr(uintptr_t /* val */) {
1489 // not optimizing registers, yet
1490 }
1491
1492 void PushFpr4(float /* val */) {
1493 // not optimizing registers, yet
1494 }
1495
1496 void PushFpr8(uint64_t /* val */) {
1497 // not optimizing registers, yet
1498 }
1499
1500 void PushStack(uintptr_t /* val */) {
1501 // counting is already done in the superclass
1502 }
1503
Andreas Gampec200a4a2014-06-16 18:39:09 -07001504 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08001505 return reinterpret_cast<uintptr_t>(nullptr);
1506 }
1507
Andreas Gampec200a4a2014-06-16 18:39:09 -07001508 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08001509 uint32_t num_stack_entries_;
1510};
1511
Andreas Gampec200a4a2014-06-16 18:39:09 -07001512class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001513 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001514 ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001515
Andreas Gampec200a4a2014-06-16 18:39:09 -07001516 // Lays out the callee-save frame. Assumes that the not-yet-fixed-up frame corresponding to
1517 // RefsAndArgs is at *m = sp. Will update *m to point to the bottom of the save frame.
1518 //
1519 // Note: assumes Walk() has been run beforehand, filling num_handle_scope_references_.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001520 void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001521 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001522 ArtMethod* method = **m;
1523
1524 DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001525
Andreas Gampec200a4a2014-06-16 18:39:09 -07001526 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
1527
1528 // First, fix up the layout of the callee-save frame.
1529 // We have to squeeze in the HandleScope, and relocate the method pointer.
1530
1531 // "Free" the slot for the method.
Ian Rogers13735952014-10-08 12:43:28 -07001532 sp8 += sizeof(void*); // In the callee-save frame we use a full pointer.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001533
1534 // Under the callee saves put handle scope and new method stack reference.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001535 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001536 size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001537
1538 sp8 -= scope_and_method;
1539 // Align by kStackAlignment.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001540 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001541
Mathieu Chartiere401d142015-04-22 13:56:20 -07001542 uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
Ian Rogers59c07062014-10-10 13:03:39 -07001543 *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
1544 num_handle_scope_references_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001545
1546 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1547 uint8_t* method_pointer = sp8;
Mathieu Chartiere401d142015-04-22 13:56:20 -07001548 auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
1549 *new_method_ref = method;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001550 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001551 }
1552
Andreas Gampec200a4a2014-06-16 18:39:09 -07001553 // Adds space for the cookie. Note: may leave stack unaligned.
Ian Rogers1428dce2014-10-21 15:02:15 -07001554 void LayoutCookie(uint8_t** sp) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001555 // Reference cookie and padding
1556 *sp -= 8;
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001557 }
1558
Andreas Gampec200a4a2014-06-16 18:39:09 -07001559 // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
1560 // Returns the new bottom. Note: this may be unaligned.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001561 uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001562 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001563 // First, fix up the layout of the callee-save frame.
1564 // We have to squeeze in the HandleScope, and relocate the method pointer.
Ian Rogers59c07062014-10-10 13:03:39 -07001565 LayoutCalleeSaveFrame(self, m, sp, handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001566
1567 // The bottom of the callee-save frame is now where the method is, *m.
1568 uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);
1569
1570 // Add space for cookie.
1571 LayoutCookie(&sp8);
1572
1573 return sp8;
1574 }
1575
1576 // WARNING: After this, *sp won't be pointing to the method anymore!
Mathieu Chartiere401d142015-04-22 13:56:20 -07001577 uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
1578 HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
1579 uint32_t** start_fpr)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001580 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1581 Walk(shorty, shorty_len);
1582
1583 // JNI part.
Ian Rogers59c07062014-10-10 13:03:39 -07001584 uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001585
1586 sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);
1587
1588 // Return the new bottom.
1589 return sp8;
1590 }
1591
1592 uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;
1593
1594 // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
1595 void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
1596 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1597
1598 private:
1599 uint32_t num_handle_scope_references_;
1600};
1601
1602uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
1603 num_handle_scope_references_++;
1604 return reinterpret_cast<uintptr_t>(nullptr);
1605}
1606
1607void ComputeGenericJniFrameSize::WalkHeader(
1608 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
1609 // JNIEnv
1610 sm->AdvancePointer(nullptr);
1611
1612 // Class object or this as first argument
1613 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
1614}
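// For example, a static native method "long f(int)" (shorty "JI") is invoked natively as
// jlong f(JNIEnv*, jclass, jint), so WalkHeader accounts for the two implicit leading
// arguments before Walk processes the shorty-derived ones.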
1615
1616// Class to push values to three separate regions. Used to fill the native call part. Adheres to
1617// the template requirements of BuildNativeCallFrameStateMachine.
1618class FillNativeCall {
1619 public:
1620 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
1621 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
1622
1623 virtual ~FillNativeCall() {}
1624
1625 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
1626 cur_gpr_reg_ = gpr_regs;
1627 cur_fpr_reg_ = fpr_regs;
1628 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08001629 }
1630
1631 void PushGpr(uintptr_t val) {
1632 *cur_gpr_reg_ = val;
1633 cur_gpr_reg_++;
1634 }
1635
1636 void PushFpr4(float val) {
1637 *cur_fpr_reg_ = val;
1638 cur_fpr_reg_++;
1639 }
1640
1641 void PushFpr8(uint64_t val) {
1642 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1643 *tmp = val;
1644 cur_fpr_reg_ += 2;
1645 }
1646
1647 void PushStack(uintptr_t val) {
1648 *cur_stack_arg_ = val;
1649 cur_stack_arg_++;
1650 }
1651
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001652 virtual uintptr_t PushHandle(mirror::Object*) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001653 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001654 UNREACHABLE();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001655 }
1656
1657 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001658 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001659 uint32_t* cur_fpr_reg_;
1660 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001661};
Andreas Gampec147b002014-03-06 18:11:06 -08001662
Andreas Gampec200a4a2014-06-16 18:39:09 -07001663// Visits arguments on the stack, placing them into a region lower down the stack for the benefit
1664// of transitioning into native code.
1665class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
1666 public:
Ian Rogers59c07062014-10-10 13:03:39 -07001667 BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001668 ArtMethod*** sp)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001669 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
1670 jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
1671 ComputeGenericJniFrameSize fsc;
1672 uintptr_t* start_gpr_reg;
1673 uint32_t* start_fpr_reg;
1674 uintptr_t* start_stack_arg;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001675 bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
Ian Rogers59c07062014-10-10 13:03:39 -07001676 &handle_scope_,
1677 &start_stack_arg,
Andreas Gampec200a4a2014-06-16 18:39:09 -07001678 &start_gpr_reg, &start_fpr_reg);
1679
Andreas Gampec200a4a2014-06-16 18:39:09 -07001680 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
1681
1682 // jni environment is always first argument
1683 sm_.AdvancePointer(self->GetJniEnv());
1684
1685 if (is_static) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001686 sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
Andreas Gampec200a4a2014-06-16 18:39:09 -07001687 }
1688 }
1689
1690 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
1691
1692 void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1693
1694 StackReference<mirror::Object>* GetFirstHandleScopeEntry()
1695 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1696 return handle_scope_->GetHandle(0).GetReference();
1697 }
1698
Ian Rogers1428dce2014-10-21 15:02:15 -07001699 jobject GetFirstHandleScopeJObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001700 return handle_scope_->GetHandle(0).ToJObject();
1701 }
1702
Ian Rogers1428dce2014-10-21 15:02:15 -07001703 void* GetBottomOfUsedArea() const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001704 return bottom_of_used_area_;
1705 }
1706
1707 private:
1708 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
1709 class FillJniCall FINAL : public FillNativeCall {
1710 public:
1711 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
1712 HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
1713 handle_scope_(handle_scope), cur_entry_(0) {}
1714
1715 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1716
1717 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
1718 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
1719 handle_scope_ = scope;
1720 cur_entry_ = 0U;
1721 }
1722
1723 void ResetRemainingScopeSlots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1724 // Initialize padding entries.
1725 size_t expected_slots = handle_scope_->NumberOfReferences();
1726 while (cur_entry_ < expected_slots) {
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001727 handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001728 }
1729 DCHECK_NE(cur_entry_, 0U);
1730 }
1731
1732 private:
1733 HandleScope* handle_scope_;
1734 size_t cur_entry_;
1735 };
1736
1737 HandleScope* handle_scope_;
1738 FillJniCall jni_call_;
1739 void* bottom_of_used_area_;
1740
1741 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001742
1743 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1744};
1745
Andreas Gampec200a4a2014-06-16 18:39:09 -07001746uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
1747 uintptr_t tmp;
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001748 MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001749 h.Assign(ref);
1750 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
1751 cur_entry_++;
1752 return tmp;
1753}
1754
Ian Rogers9758f792014-03-13 09:02:55 -07001755void BuildGenericJniFrameVisitor::Visit() {
1756 Primitive::Type type = GetParamPrimitiveType();
1757 switch (type) {
1758 case Primitive::kPrimLong: {
1759 jlong long_arg;
1760 if (IsSplitLongOrDouble()) {
1761 long_arg = ReadSplitLongParam();
1762 } else {
1763 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1764 }
1765 sm_.AdvanceLong(long_arg);
1766 break;
1767 }
1768 case Primitive::kPrimDouble: {
1769 uint64_t double_arg;
1770 if (IsSplitLongOrDouble()) {
1771 // Read the raw bits into a uint64_t so that we don't cast to a double.
1772 double_arg = ReadSplitLongParam();
1773 } else {
1774 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1775 }
1776 sm_.AdvanceDouble(double_arg);
1777 break;
1778 }
1779 case Primitive::kPrimNot: {
1780 StackReference<mirror::Object>* stack_ref =
1781 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001782 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07001783 break;
1784 }
1785 case Primitive::kPrimFloat:
1786 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1787 break;
1788 case Primitive::kPrimBoolean: // Fall-through.
1789 case Primitive::kPrimByte: // Fall-through.
1790 case Primitive::kPrimChar: // Fall-through.
1791 case Primitive::kPrimShort: // Fall-through.
1792 case Primitive::kPrimInt: // Fall-through.
1793 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1794 break;
1795 case Primitive::kPrimVoid:
1796 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001797 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -07001798 }
1799}
1800
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001801void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001802 // Clear out rest of the scope.
1803 jni_call_.ResetRemainingScopeSlots();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001804 // Install HandleScope.
1805 self->PushHandleScope(handle_scope_);
Ian Rogers9758f792014-03-13 09:02:55 -07001806}
1807
Ian Rogers04c31d22014-07-07 21:44:06 -07001808#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001809extern "C" void* artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001810#else
1811extern "C" void* artFindNativeMethod(Thread* self);
1812#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001813
Andreas Gampead615172014-04-04 16:20:13 -07001814uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
1815 if (lock != nullptr) {
1816 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
1817 } else {
1818 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
1819 }
1820}
1821
1822void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
1823 if (lock != nullptr) {
1824 JniMethodEndSynchronized(cookie, lock, self);
1825 } else {
1826 JniMethodEnd(cookie, self);
1827 }
1828}
1829
Andreas Gampec147b002014-03-06 18:11:06 -08001830/*
1831 * Initializes an alloca region assumed to be directly below sp for a native call:
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001832 * Create a HandleScope and a call stack, and fill a mini stack with values to be pushed to registers.
Andreas Gampec147b002014-03-06 18:11:06 -08001833 * The final element on the stack is a pointer to the native code.
1834 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001835 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001836 * We need to fix this, as the handle scope needs to go into the callee-save frame.
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001837 *
Andreas Gampec147b002014-03-06 18:11:06 -08001838 * The return of this function denotes:
1839 * 1) On success: the native code to invoke and the bottom of the used alloca area.
1840 * 2) On failure: a two-word failure value, with an exception pending.
1841 */
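// A sketch of the area this trampoline builds (derived from the layout helpers above),
// higher addresses first:
//   | callee-save frame (RefsAndArgs) |  <- sp on entry
//   | HandleScope                     |
//   | ArtMethod* (new top of stack)   |  <- sp after the visitor runs
//   | cookie + padding (8 bytes)      |  cookie stored after JniMethodStart*
//   | native stack arguments          |
//   | FPR, then GPR scratch areas     |  <- bottom of the used area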
Mathieu Chartiere401d142015-04-22 13:56:20 -07001842extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
Andreas Gampe2da88232014-02-27 12:26:20 -08001843 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001844 ArtMethod* called = *sp;
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001845 DCHECK(called->IsNative()) << PrettyMethod(called, true);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001846 uint32_t shorty_len = 0;
1847 const char* shorty = called->GetShorty(&shorty_len);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001848
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001849 // Run the visitor and update sp.
Ian Rogers59c07062014-10-10 13:03:39 -07001850 BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001851 visitor.VisitArguments();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001852 visitor.FinalizeHandleScope(self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001853
Andreas Gampec200a4a2014-06-16 18:39:09 -07001854 // Fix up managed-stack things in Thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001855 self->SetTopOfStack(sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001856
Ian Rogerse0dcd462014-03-08 15:21:04 -08001857 self->VerifyStack();
1858
Andreas Gampe90546832014-03-12 18:07:19 -07001859 // Start JNI, save the cookie.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001860 uint32_t cookie;
1861 if (called->IsSynchronized()) {
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001862 cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001863 if (self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001864 self->PopHandleScope();
Andreas Gampec147b002014-03-06 18:11:06 -08001865 // A two-word failure value denotes an error.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001866 return GetTwoWordFailureValue();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001867 }
1868 } else {
1869 cookie = JniMethodStart(self);
1870 }
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001871 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Ian Rogerse0dcd462014-03-08 15:21:04 -08001872 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001873
Andreas Gampe90546832014-03-12 18:07:19 -07001874 // Retrieve the stored native code.
Mathieu Chartier2d721012014-11-10 11:08:06 -08001875 void* nativeCode = called->GetEntryPointFromJni();
Andreas Gampe90546832014-03-12 18:07:19 -07001876
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07001877 // There are two cases for the content of nativeCode:
1878 // 1) Pointer to the native function.
1879 // 2) Pointer to the trampoline for native code binding.
1880 // In the second case, we need to execute the binding and continue with the actual native function
1881 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07001882 DCHECK(nativeCode != nullptr);
1883 if (nativeCode == GetJniDlsymLookupStub()) {
Ian Rogers04c31d22014-07-07 21:44:06 -07001884#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001885 nativeCode = artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001886#else
1887 nativeCode = artFindNativeMethod(self);
1888#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001889
1890 if (nativeCode == nullptr) {
1891 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07001892
1893 // End JNI, as the assembly will move to deliver the exception.
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001894 jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001895 if (shorty[0] == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001896 artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
1897 } else {
1898 artQuickGenericJniEndJNINonRef(self, cookie, lock);
1899 }
1900
Andreas Gampec200a4a2014-06-16 18:39:09 -07001901 return GetTwoWordFailureValue();
Andreas Gampe90546832014-03-12 18:07:19 -07001902 }
1903 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001904 }
1905
Andreas Gampec200a4a2014-06-16 18:39:09 -07001906 // Return native code addr(lo) and bottom of alloca address(hi).
1907 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
1908 reinterpret_cast<uintptr_t>(nativeCode));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001909}
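// Illustrative note (an assumption about the assembly side, not stated in this file): the
// generic JNI stub is expected to release the alloca down to the returned bottom address,
// load the GPR/FPR scratch areas into argument registers, and branch to the returned
// native code pointer.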
1910
1911/*
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001912 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001913 * unlocking.
1914 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001915extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001916 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001917 ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001918 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001919 ArtMethod* called = *sp;
Ian Rogerse0dcd462014-03-08 15:21:04 -08001920 uint32_t cookie = *(sp32 - 1);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001921
Andreas Gampead615172014-04-04 16:20:13 -07001922 jobject lock = nullptr;
1923 if (called->IsSynchronized()) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001924 HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
Mathieu Chartiere401d142015-04-22 13:56:20 -07001925 + sizeof(*sp));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001926 lock = table->GetHandle(0).ToJObject();
Andreas Gampead615172014-04-04 16:20:13 -07001927 }
1928
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001929 char return_shorty_char = called->GetShorty()[0];
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001930
1931 if (return_shorty_char == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001932 return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001933 } else {
Andreas Gampead615172014-04-04 16:20:13 -07001934 artQuickGenericJniEndJNINonRef(self, cookie, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001935
1936 switch (return_shorty_char) {
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001937 case 'F': {
1938 if (kRuntimeISA == kX86) {
1939 // Convert back the result to float.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001940 double d = bit_cast<double, uint64_t>(result_f);
1941 return bit_cast<uint32_t, float>(static_cast<float>(d));
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001942 } else {
1943 return result_f;
1944 }
1945 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001946 case 'D':
1947 return result_f;
1948 case 'Z':
1949 return result.z;
1950 case 'B':
1951 return result.b;
1952 case 'C':
1953 return result.c;
1954 case 'S':
1955 return result.s;
1956 case 'I':
1957 return result.i;
1958 case 'J':
1959 return result.j;
1960 case 'V':
1961 return 0;
1962 default:
1963 LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
1964 return 0;
1965 }
1966 }
Andreas Gampe2da88232014-02-27 12:26:20 -08001967}
1968
Andreas Gamped58342c2014-06-05 14:18:08 -07001969// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
1970// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07001971//
Andreas Gamped58342c2014-06-05 14:18:08 -07001972// It is valid to use this, as at the usage points here (returns from C functions) we are assumed
1973// to hold the mutator lock (see the SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) annotations).
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001974
1975template<InvokeType type, bool access_check>
Mathieu Chartiere401d142015-04-22 13:56:20 -07001976static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, Thread* self,
1977 ArtMethod** sp) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001978 ScopedQuickEntrypointChecks sqec(self);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001979 DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
1980 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
1981 ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, type);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001982 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001983 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1984 uint32_t shorty_len;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001985 const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001986 {
1987 // Remember the args in case a GC happens in FindMethodFromCode.
1988 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
1989 RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
1990 visitor.VisitArguments();
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001991 method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method,
1992 self);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001993 visitor.FixupReferences();
1994 }
1995
Ian Rogerse0a02da2014-12-02 14:10:53 -08001996 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001997 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07001998 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001999 }
2000 }
2001 DCHECK(!self->IsExceptionPending());
2002 const void* code = method->GetEntryPointFromQuickCompiledCode();
2003
2004 // When we return, the caller will branch to this address, so it had better not be 0!
Ian Rogerse0a02da2014-12-02 14:10:53 -08002005 DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
Andreas Gampec200a4a2014-06-16 18:39:09 -07002006 << " location: "
2007 << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07002008
Andreas Gamped58342c2014-06-05 14:18:08 -07002009 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
2010 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002011}
2012
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01002013// Explicit artInvokeCommon template function declarations to please analysis tool.
2014#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
2015 template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) \
Mathieu Chartiere401d142015-04-22 13:56:20 -07002016 TwoWordReturn artInvokeCommon<type, access_check>( \
2017 uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01002018
2019EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
2020EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
2021EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
2022EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
2023EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
2024EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
2025EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
2026EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
2027EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
2028EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
2029#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}
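
// Each *WithAccessCheck entry point above is the C++ slow path behind the assembly stub of the
// same name (see runtime_support_asm.S): the stub presumably sets up the RefsAndArgs callee-save
// frame, passes its address as sp, and branches on the returned (code, method) pair. Only the
// compile-time invoke type differs between the five; access_check is fixed to true on this path.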

// Determine the target of an interface dispatch. The receiver (this_object) is known to be
// non-null here.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(uint32_t dex_method_idx,
                                                      mirror::Object* this_object,
                                                      Thread* self, ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  // The optimizing compiler currently does not inline methods that contain an interface
  // invocation, so the outer method is also the calling method. Using it directly avoids
  // fetching a stack map, which is more expensive.
  ArtMethod* caller_method = QuickArgumentVisitor::GetOuterMethod(sp);
  DCHECK_EQ(caller_method, QuickArgumentVisitor::GetCallingMethod(sp));
  ArtMethod* interface_method = caller_method->GetDexCacheResolvedMethod(
      dex_method_idx, sizeof(void*));
  DCHECK(interface_method != nullptr) << dex_method_idx << " " << PrettyMethod(caller_method);
  ArtMethod* method;
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    // The interface method is already resolved in the caller's dex cache: look up the concrete
    // implementation in the receiver's class.
    method = this_object->GetClass()->FindVirtualMethodForInterface(
        interface_method, sizeof(void*));
    if (UNLIKELY(method == nullptr)) {
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
          interface_method, this_object, caller_method);
      return GetTwoWordFailureValue();  // Failure.
    }
  } else {
    // The dex cache still holds the resolution method, so the interface method itself has not
    // been resolved yet; resolve it from the dex file below.
    DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
    if (kIsDebugBuild) {
      // Check that we really arrived here from an invoke-interface call site and that the
      // method index passed by the stub matches the instruction's operand.
      uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
      const DexFile::CodeItem* code = caller_method->GetCodeItem();
      CHECK_LT(dex_pc, code->insns_size_in_code_units_);
      const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
      Instruction::Code instr_code = instr->Opcode();
      CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
            instr_code == Instruction::INVOKE_INTERFACE_RANGE)
          << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
      if (instr_code == Instruction::INVOKE_INTERFACE) {
        CHECK_EQ(dex_method_idx, instr->VRegB_35c());
      } else {
        CHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
        CHECK_EQ(dex_method_idx, instr->VRegB_3rc());
      }
    }
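
    // Of the two encodings accepted above, invoke-interface uses format 35c (up to five argument
    // registers named explicitly) while invoke-interface/range uses format 3rc (a contiguous
    // register range). In both formats the vB operand is the method index, which is what
    // VRegB_35c() and VRegB_3rc() extract.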

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
                                                   &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}
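
// For illustration only (a hypothetical call site, not taken from this codebase): given
//   java.util.List<String> list = ...;
//   list.add("x");
// the compiler emits invoke-interface targeting List.add. When compiled code cannot dispatch the
// call directly, its stub lands here with `list` as this_object and List.add's dex method index;
// the trampoline then returns the (code, method) pair for the receiver's concrete implementation,
// e.g. ArrayList.add.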

}  // namespace art