/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "class_root.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "dex/method_reference.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "index_bss_mapping.h"
#include "instrumentation.h"
#include "interpreter/interpreter.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "linear_alloc.h"
#include "method_handles.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/var_handle.h"
#include "oat_file.h"
#include "oat_quick_method_header.h"
#include "quick_exception_handler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "var_handles.h"
#include "well_known_classes.h"

namespace art {

// Visits the arguments as saved to the stack by a CalleeSaveType::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      RuntimeCalleeSaveFrame::GetGpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      RuntimeCalleeSaveFrame::GetFpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset =
      RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveRefsAndArgs);
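  // The per-architecture blocks below document the kSaveRefsAndArgs frame layout used on each
  // ISA and define the constants needed to locate GPR, FPR and stack arguments within it.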
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | LR          |
  // | ...         |    4x6 bytes callee saves
  // | R3          |
  // | R2          |
  // | R1          |
  // | S15         |
  // | :           |
  // | S0          |
  // |             |    4x2 bytes padding
  // | Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = true;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = 16;
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | LR          |
  // | X29         |
  // | :           |
  // | X20         |
  // | X7          |
  // | :           |
  // | X1          |
  // | D7          |
  // | :           |
  // | D0          |
  // |             |    padding
  // | Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | RA          |
  // | ...         |    callee saves
  // | T1          |    arg5
  // | T0          |    arg4
  // | A3          |    arg3
  // | A2          |    arg2
  // | A1          |    arg1
  // | F19         |
  // | F18         |    f_arg5
  // | F17         |
  // | F16         |    f_arg4
  // | F15         |
  // | F14         |    f_arg3
  // | F13         |
  // | F12         |    f_arg2
  // | F11         |
  // | F10         |    f_arg1
  // | F9          |
  // | F8          |    f_arg0
  // |             |    padding
  // | A0/Method*  |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 5;   // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 12;  // 6 arguments passed in FPRs. Floats can be
                                                  // passed only in even numbered registers and
                                                  // each double occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | RA          |
  // | ...         |    callee saves
  // | A7          |    arg7
  // | A6          |    arg6
  // | A5          |    arg5
  // | A4          |    arg4
  // | A3          |    arg3
  // | A2          |    arg2
  // | A1          |    arg1
  // | F19         |    f_arg7
  // | F18         |    f_arg6
  // | F17         |    f_arg5
  // | F16         |    f_arg4
  // | F15         |    f_arg3
  // | F14         |    f_arg2
  // | F13         |    f_arg1
  // | F12         |    f_arg0
  // |             |    padding
  // | A0/Method*  |  <- sp
  // NOTE: for MIPS64, when A0 is skipped, F12 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
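  // Note: as the frame layout above shows, the argument GPRs (RSI, RDX, RCX, R8, R9) are
  // interleaved with callee saves rather than spilled in argument order, so the mapping from
  // argument index to spill slot below is non-linear (e.g. arg1 in RSI lives in the 5th slot).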
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static StackReference<mirror::Object>* GetProxyThisObjectReference(ArtMethod** sp)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address);
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, CalleeSaveType::kSaveRefsAndArgs);
  }

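  // Returns the ArtMethod* stored at the bottom of the caller's frame, i.e. the outer method
  // found immediately above this callee-save frame on the stack.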
  static ArtMethod* GetOuterMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

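  // Returns the dex pc of the call site in the caller. For code compiled by the optimizing
  // compiler this consults the stack map and, if the call site was inlined, reports the dex pc
  // of the innermost inlined frame.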
  static uint32_t GetCallingDexPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    constexpr size_t callee_frame_size =
        RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);

    if (current_code->IsOptimized()) {
      CodeInfo code_info(current_code);
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset);
      DCHECK(stack_map.IsValid());
      if (stack_map.HasInlineInfo()) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
        return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
      } else {
        return stack_map.GetDexPc();
      }
    } else {
      return current_code->ToDexPc(*caller_sp, outer_pc);
    }
  }

  static bool GetInvokeType(ArtMethod** sp, InvokeType* invoke_type, uint32_t* dex_method_index)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    constexpr size_t callee_frame_size =
        RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    if (!current_code->IsOptimized()) {
      return false;
    }
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);
    CodeInfo code_info(current_code);
    MethodInfo method_info = current_code->GetOptimizedMethodInfo();
    InvokeInfo invoke(code_info.GetInvokeInfoForNativePcOffset(outer_pc_offset));
    if (invoke.IsValid()) {
      *invoke_type = static_cast<InvokeType>(invoke.GetInvokeType());
      *dex_method_index = invoke.GetMethodIndex(method_info);
      return true;
    }
    return false;
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* return_address_spill =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset;
    return *reinterpret_cast<uintptr_t*>(return_address_spill);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
          + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want to assume that counters (fpr_double_index_) are even if the
    // next register is even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

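  // Advance the register indexes. With kGprFprLockstep (e.g. MIPS64), GPRs and FPRs are
  // allocated in lockstep, so consuming a register of one kind also consumes one of the other.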
  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }

  void VisitArguments() REQUIRES_SHARED(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the method's first argument.
    // (b) Whatever the argument type is, 'stack_index_' is advanced for every argument visited.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong &&
#if defined(__mips__) && !defined(__LP64__)
                (gpr_index_ == 0 || gpr_index_ == 2) &&
#else
                gpr_index_ == 0 &&
#endif
                kAlignPairRegister) {
              // Currently, this is only for ARM and MIPS, where we align long parameters with
              // even-numbered registers by skipping R1 (on ARM) or A1(A3) (on MIPS) and using
              // R2 (on ARM) or A2(T0) (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;    // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;    // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;         // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed
  // in terms of singles, may be behind fpr_index.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;       // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// allows using the QuickArgumentVisitor constants without moving all the code into its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObjectReference(sp)->AsMirrorPtr();
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

// Don't inline. See b/65159206.
NO_INLINE
static void HandleDeoptimization(JValue* result,
                                 ArtMethod* method,
                                 ShadowFrame* deopt_frame,
                                 ManagedStack* fragment)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Coming from partial-fragment deopt.
  Thread* self = Thread::Current();
  if (kIsDebugBuild) {
    // Sanity-check: are the methods as expected? We check that the last shadow frame (the bottom
    // of the call-stack) corresponds to the called method.
    ShadowFrame* linked = deopt_frame;
    while (linked->GetLink() != nullptr) {
      linked = linked->GetLink();
    }
    CHECK_EQ(method, linked->GetMethod()) << method->PrettyMethod() << " "
        << ArtMethod::PrettyMethod(linked->GetMethod());
  }

  if (VLOG_IS_ON(deopt)) {
    // Print out the stack to verify that it was a partial-fragment deopt.
    LOG(INFO) << "Continuing from deopt. Stack is:";
    QuickExceptionHandler::DumpFramesWithType(self, true);
  }

  ObjPtr<mirror::Throwable> pending_exception;
  bool from_code = false;
  DeoptimizationMethodType method_type;
  self->PopDeoptimizationContext(/* out */ result,
                                 /* out */ &pending_exception,
                                 /* out */ &from_code,
                                 /* out */ &method_type);

  // Push a transition back into managed code onto the linked list in thread.
  self->PushManagedStackFragment(fragment);

  // Ensure that the stack is still in order.
  if (kIsDebugBuild) {
    class DummyStackVisitor : public StackVisitor {
     public:
      explicit DummyStackVisitor(Thread* self_in) REQUIRES_SHARED(Locks::mutator_lock_)
          : StackVisitor(self_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

      bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
        // Nothing to do here. In a debug build, SanityCheckFrame will do the work in the walking
        // logic. Just always say we want to continue.
        return true;
      }
    };
    DummyStackVisitor dsv(self);
    dsv.WalkStack();
  }

  // Restore the exception that was pending before deoptimization then interpret the
  // deoptimized frames.
  if (pending_exception != nullptr) {
    self->SetException(pending_exception);
  }
  interpreter::EnterInterpreterFromDeoptimize(self,
                                              deopt_frame,
                                              result,
                                              from_code,
                                              DeoptimizationMethodType::kDefault);
}

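// Entry point used when a method has no usable compiled code: builds a shadow frame from the
// quick arguments (or resumes a partial-fragment deoptimization) and runs the method in the
// interpreter, returning the result as the quick ABI expects.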
extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (UNLIKELY(!method->IsInvokable())) {
    method->ThrowInvocationTimeError();
    return 0;
  }

  JValue tmp_value;
  ShadowFrame* deopt_frame = self->PopStackedShadowFrame(
      StackedShadowFrameType::kDeoptimizationShadowFrame, false);
  ManagedStack fragment;

  DCHECK(!method->IsNative()) << method->PrettyMethod();
  uint32_t shorty_len = 0;
  ArtMethod* non_proxy_method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  DCHECK(non_proxy_method->GetCodeItem() != nullptr) << method->PrettyMethod();
  CodeItemDataAccessor accessor(non_proxy_method->DexInstructionData());
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);

  JValue result;

  if (UNLIKELY(deopt_frame != nullptr)) {
    HandleDeoptimization(&result, method, deopt_frame, &fragment);
  } else {
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    uint16_t num_regs = accessor.RegistersSize();
    // No last shadow coming from quick.
    ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
        CREATE_SHADOW_FRAME(num_regs, /* link */ nullptr, method, /* dex pc */ 0);
    ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
    size_t first_arg_reg = accessor.RegistersSize() - accessor.InsSize();
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending())
            << shadow_frame->GetMethod()->PrettyMethod();
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    result = interpreter::EnterInterpreterFromEntryPoint(self, accessor, shadow_frame);
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);

  // Request a stack deoptimization if needed.
  ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp);
  // If caller_pc is the instrumentation exit stub, the stub will check to see if deoptimization
  // should be done and it knows the real return pc.
  if (UNLIKELY(caller_pc != reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) &&
               Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
    if (!Runtime::Current()->IsAsyncDeoptimizeable(caller_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                   << caller->PrettyMethod();
    } else {
      // Push the context of the deoptimization stack so we can restore the return value and the
      // exception before executing the deoptimized frames.
      self->PushDeoptimizationContext(
          result,
          shorty[0] == 'L' || shorty[0] == '[',  /* class or array */
          self->GetException(),
          false /* from_code */,
          DeoptimizationMethodType::kDefault);

      // Set special exception to cause deoptimization.
      self->SetException(Thread::GetDeoptimizationException());
    }
  }

  // No need to restore the args since the method has already been run by the interpreter.
  return result.GetJ();
}

// Visits arguments on the stack placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(
    ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << proxy_method->PrettyMethod();
  DCHECK(receiver->GetClass()->IsProxyClass()) << proxy_method->PrettyMethod();
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ((*sp), proxy_method) << proxy_method->PrettyMethod();
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place arguments into the args vector and remove the receiver.
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  CHECK(!non_proxy_method->IsStatic()) << proxy_method->PrettyMethod() << " "
      << non_proxy_method->PrettyMethod();
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(
      sp, /* is_static */ false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << proxy_method->PrettyMethod();
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  ArtMethod* interface_method = proxy_method->FindOverriddenMethod(kRuntimePointerSize);
  DCHECK(interface_method != nullptr) << proxy_method->PrettyMethod();
  DCHECK(!interface_method->IsProxyMethod()) << interface_method->PrettyMethod();
  self->EndAssertNoThreadSuspension(old_cause);
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  ObjPtr<mirror::Method> interface_reflect_method =
      mirror::Method::CreateFromArtMethod<kRuntimePointerSize, false>(soa.Self(), interface_method);
  if (interface_reflect_method == nullptr) {
    soa.Self()->AssertPendingOOMException();
    return 0;
  }
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_reflect_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  return result.GetJ();
}

// Visitor returning a reference argument at a given position in a Quick stack frame.
// NOTE: Only used for testing purposes.
class GetQuickReferenceArgumentAtVisitor FINAL : public QuickArgumentVisitor {
 public:
  GetQuickReferenceArgumentAtVisitor(ArtMethod** sp,
                                     const char* shorty,
                                     uint32_t shorty_len,
                                     size_t arg_pos)
      : QuickArgumentVisitor(sp, /* is_static */ false, shorty, shorty_len),
        cur_pos_(0u),
        arg_pos_(arg_pos),
        ref_arg_(nullptr) {
    CHECK_LT(arg_pos, shorty_len) << "Argument position greater than the number of arguments";
  }

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
    if (cur_pos_ == arg_pos_) {
      Primitive::Type type = GetParamPrimitiveType();
      CHECK_EQ(type, Primitive::kPrimNot) << "Argument at searched position is not a reference";
      ref_arg_ = reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    }
    ++cur_pos_;
  }

  StackReference<mirror::Object>* GetReferenceArgument() {
    return ref_arg_;
  }

 private:
  // The position of the currently visited argument.
  size_t cur_pos_;
  // The position of the searched argument.
  const size_t arg_pos_;
  // The reference argument, if found.
  StackReference<mirror::Object>* ref_arg_;

  DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentAtVisitor);
};

// Returns the reference argument at position `arg_pos` in the Quick stack frame at address `sp`.
// NOTE: Only used for testing purposes.
extern "C" StackReference<mirror::Object>* artQuickGetProxyReferenceArgumentAt(size_t arg_pos,
                                                                               ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* proxy_method = *sp;
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  CHECK(!non_proxy_method->IsStatic())
      << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod();
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  GetQuickReferenceArgumentAtVisitor ref_arg_visitor(sp, shorty, shorty_len, arg_pos);
  ref_arg_visitor.VisitArguments();
  StackReference<mirror::Object>* ref_arg = ref_arg_visitor.GetReferenceArgument();
  return ref_arg;
}
1009
1010// Visitor returning all the reference arguments in a Quick stack frame.
1011class GetQuickReferenceArgumentsVisitor FINAL : public QuickArgumentVisitor {
1012 public:
1013 GetQuickReferenceArgumentsVisitor(ArtMethod** sp,
1014 bool is_static,
1015 const char* shorty,
1016 uint32_t shorty_len)
1017 : QuickArgumentVisitor(sp, is_static, shorty, shorty_len) {}
1018
1019 void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
1020 Primitive::Type type = GetParamPrimitiveType();
1021 if (type == Primitive::kPrimNot) {
1022 StackReference<mirror::Object>* ref_arg =
1023 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
1024 ref_args_.push_back(ref_arg);
1025 }
1026 }
1027
1028 std::vector<StackReference<mirror::Object>*> GetReferenceArguments() {
1029 return ref_args_;
1030 }
1031
1032 private:
1033 // The reference arguments.
1034 std::vector<StackReference<mirror::Object>*> ref_args_;
1035
1036 DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentsVisitor);
1037};
1038
1039// Returns all the reference arguments in the Quick stack frame at address `sp`.
1040std::vector<StackReference<mirror::Object>*> GetProxyReferenceArguments(ArtMethod** sp)
1041 REQUIRES_SHARED(Locks::mutator_lock_) {
1042 ArtMethod* proxy_method = *sp;
1043 ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
1044 CHECK(!non_proxy_method->IsStatic())
1045 << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod();
1046 uint32_t shorty_len = 0;
1047 const char* shorty = non_proxy_method->GetShorty(&shorty_len);
1048 GetQuickReferenceArgumentsVisitor ref_args_visitor(sp, /* is_static */ false, shorty, shorty_len);
1049 ref_args_visitor.VisitArguments();
1050 std::vector<StackReference<mirror::Object>*> ref_args = ref_args_visitor.GetReferenceArguments();
1051 return ref_args;
1052}
1053
Ian Rogers848871b2013-08-05 10:56:33 -07001054// Read object references held in arguments from quick frames and place them in JNI local references,
1055// so they don't get garbage collected.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001056class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -07001057 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -07001058 RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
1059 uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
Andreas Gampec200a4a2014-06-16 18:39:09 -07001060 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}
Ian Rogers848871b2013-08-05 10:56:33 -07001061
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001062 void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001063
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001064 void FixupReferences() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers848871b2013-08-05 10:56:33 -07001065
1066 private:
Ian Rogers9758f792014-03-13 09:02:55 -07001067 ScopedObjectAccessUnchecked* const soa_;
Mathieu Chartier5275bcb2014-02-20 17:16:42 -08001068 // References which we must update when exiting in case the GC moved the objects.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001069 std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
1070
Mathieu Chartier590fee92013-09-13 13:46:47 -07001071 DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
Ian Rogers848871b2013-08-05 10:56:33 -07001072};
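
// A minimal usage sketch, mirroring the call sites below (e.g. artQuickResolutionTrampoline);
// all names are the ones used in this file:
//
//   ScopedObjectAccessUnchecked soa(self);
//   RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa);
//   visitor.VisitArguments();   // Pin the reference arguments as JNI local references.
//   ...                         // Work that may suspend and trigger a moving GC.
//   visitor.FixupReferences();  // Write possibly-moved objects back into the quick frame.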
1073
Ian Rogers9758f792014-03-13 09:02:55 -07001074void RememberForGcArgumentVisitor::Visit() {
1075 if (IsParamAReference()) {
1076 StackReference<mirror::Object>* stack_ref =
1077 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
1078 jobject reference =
1079 soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
1080 references_.push_back(std::make_pair(reference, stack_ref));
1081 }
1082}
1083
1084void RememberForGcArgumentVisitor::FixupReferences() {
1085 // Fixup any references which may have changed.
1086 for (const auto& pair : references_) {
Mathieu Chartier1a5337f2016-10-13 13:48:23 -07001087 pair.second->Assign(soa_->Decode<mirror::Object>(pair.first));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001088 soa_->Env()->DeleteLocalRef(pair.first);
Ian Rogers9758f792014-03-13 09:02:55 -07001089 }
1090}
1091
Alex Lightb7edcda2017-04-27 13:20:31 -07001092extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method,
1093 mirror::Object* this_object,
1094 Thread* self,
1095 ArtMethod** sp)
1096 REQUIRES_SHARED(Locks::mutator_lock_) {
1097 const void* result;
1098 // Instrumentation changes the stack. Thus, when exiting, the stack cannot be verified, so skip
1099 // that part.
1100 ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
1101 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
1102 if (instrumentation->IsDeoptimized(method)) {
1103 result = GetQuickToInterpreterBridge();
1104 } else {
1105 result = instrumentation->GetQuickCodeFor(method, kRuntimePointerSize);
1106 DCHECK(!Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(result));
1107 }
1108
1109 bool interpreter_entry = (result == GetQuickToInterpreterBridge());
1110 bool is_static = method->IsStatic();
1111 uint32_t shorty_len;
1112 const char* shorty =
1113 method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty(&shorty_len);
1114
1115 ScopedObjectAccessUnchecked soa(self);
1116 RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa);
1117 visitor.VisitArguments();
1118
1119 instrumentation->PushInstrumentationStackFrame(self,
1120 is_static ? nullptr : this_object,
1121 method,
1122 QuickArgumentVisitor::GetCallingPc(sp),
1123 interpreter_entry);
1124
1125 visitor.FixupReferences();
1126 if (UNLIKELY(self->IsExceptionPending())) {
1127 return nullptr;
1128 }
1129 CHECK(result != nullptr) << method->PrettyMethod();
1130 return result;
1131}
1132
1133extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self,
1134 ArtMethod** sp,
1135 uint64_t* gpr_result,
1136 uint64_t* fpr_result)
1137 REQUIRES_SHARED(Locks::mutator_lock_) {
1138 DCHECK_EQ(reinterpret_cast<uintptr_t>(self), reinterpret_cast<uintptr_t>(Thread::Current()));
1139 CHECK(gpr_result != nullptr);
1140 CHECK(fpr_result != nullptr);
1141 // Instrumentation exit stub must not be entered with a pending exception.
1142 CHECK(!self->IsExceptionPending()) << "Enter instrumentation exit stub with pending exception "
1143 << self->GetException()->Dump();
1144 // Compute address of return PC and sanity check that it currently holds 0.
Vladimir Markod3083dd2018-05-17 08:43:47 +01001145 constexpr size_t return_pc_offset =
1146 RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveEverything);
Alex Lightb7edcda2017-04-27 13:20:31 -07001147 uintptr_t* return_pc = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) +
1148 return_pc_offset);
1149 CHECK_EQ(*return_pc, 0U);
1150
1151 // Pop the frame, filling in the return pc. When deoptimization should not be performed, the
1152 // return value carries the caller's return address; when it should be performed, it carries
1153 // the address of the deoptimization entry point instead, so the instrumentation exit stub
1154 // resumes there rather than at the caller.
1155 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
1156 TwoWordReturn return_or_deoptimize_pc = instrumentation->PopInstrumentationStackFrame(
1157 self, return_pc, gpr_result, fpr_result);
Vladimir Markofac21782018-03-13 17:01:09 +00001158 if (self->IsExceptionPending() || self->ObserveAsyncException()) {
Alex Lightb7edcda2017-04-27 13:20:31 -07001159 return GetTwoWordFailureValue();
1160 }
1161 return return_or_deoptimize_pc;
1162}
1163
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001164static std::string DumpInstruction(ArtMethod* method, uint32_t dex_pc)
1165 REQUIRES_SHARED(Locks::mutator_lock_) {
1166 if (dex_pc == static_cast<uint32_t>(-1)) {
1167 CHECK(method == jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt));
1168 return "<native>";
1169 } else {
1170 CodeItemInstructionAccessor accessor = method->DexInstructions();
1171 CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits());
1172 return accessor.InstructionAt(dex_pc).DumpString(method->GetDexFile());
1173 }
1174}
1175
Vladimir Marko606adb32018-04-05 14:49:24 +01001176static void DumpB74410240ClassData(ObjPtr<mirror::Class> klass)
1177 REQUIRES_SHARED(Locks::mutator_lock_) {
1178 std::string storage;
1179 const char* descriptor = klass->GetDescriptor(&storage);
1180 LOG(FATAL_WITHOUT_ABORT) << " " << DescribeLoaders(klass->GetClassLoader(), descriptor);
1181 const OatDexFile* oat_dex_file = klass->GetDexFile().GetOatDexFile();
1182 if (oat_dex_file != nullptr) {
1183 const OatFile* oat_file = oat_dex_file->GetOatFile();
1184 const char* dex2oat_cmdline =
1185 oat_file->GetOatHeader().GetStoreValueByKey(OatHeader::kDex2OatCmdLineKey);
1186 LOG(FATAL_WITHOUT_ABORT) << " OatFile: " << oat_file->GetLocation()
1187 << "; " << (dex2oat_cmdline != nullptr ? dex2oat_cmdline : "<not recorded>");
1188 }
1189}
1190
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001191static void DumpB74410240DebugData(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
1192 // Mimic the search for the caller and dump some data while doing so.
Vladimir Marko606adb32018-04-05 14:49:24 +01001193 LOG(FATAL_WITHOUT_ABORT) << "Dumping debugging data, please attach a bugreport to b/74410240.";
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001194
1195 constexpr CalleeSaveType type = CalleeSaveType::kSaveRefsAndArgs;
1196 CHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type));
1197
Vladimir Markod3083dd2018-05-17 08:43:47 +01001198 constexpr size_t callee_frame_size = RuntimeCalleeSaveFrame::GetFrameSize(type);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001199 auto** caller_sp = reinterpret_cast<ArtMethod**>(
1200 reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
Vladimir Markod3083dd2018-05-17 08:43:47 +01001201 constexpr size_t callee_return_pc_offset = RuntimeCalleeSaveFrame::GetReturnPcOffset(type);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001202 uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
1203 (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
1204 ArtMethod* outer_method = *caller_sp;
1205
1206 if (UNLIKELY(caller_pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()))) {
1207 LOG(FATAL_WITHOUT_ABORT) << "Method: " << outer_method->PrettyMethod()
1208 << " native pc: " << caller_pc << " Instrumented!";
1209 return;
1210 }
1211
1212 const OatQuickMethodHeader* current_code = outer_method->GetOatQuickMethodHeader(caller_pc);
1213 CHECK(current_code != nullptr);
1214 CHECK(current_code->IsOptimized());
1215 uintptr_t native_pc_offset = current_code->NativeQuickPcOffset(caller_pc);
David Srbecky052f8ca2018-04-26 15:42:54 +01001216 CodeInfo code_info(current_code);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001217 MethodInfo method_info = current_code->GetOptimizedMethodInfo();
David Srbecky052f8ca2018-04-26 15:42:54 +01001218 StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001219 CHECK(stack_map.IsValid());
David Srbecky052f8ca2018-04-26 15:42:54 +01001220 uint32_t dex_pc = stack_map.GetDexPc();
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001221
1222 // Log the outer method and its associated dex file and class table pointer which can be used
1223 // to find out if the inlined methods were defined by other dex file(s) or class loader(s).
1224 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1225 LOG(FATAL_WITHOUT_ABORT) << "Outer: " << outer_method->PrettyMethod()
1226 << " native pc: " << caller_pc
1227 << " dex pc: " << dex_pc
1228 << " dex file: " << outer_method->GetDexFile()->GetLocation()
1229 << " class table: " << class_linker->ClassTableForClassLoader(outer_method->GetClassLoader());
Vladimir Marko606adb32018-04-05 14:49:24 +01001230 DumpB74410240ClassData(outer_method->GetDeclaringClass());
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001231 LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(outer_method, dex_pc);
1232
1233 ArtMethod* caller = outer_method;
David Srbecky052f8ca2018-04-26 15:42:54 +01001234 if (stack_map.HasInlineInfo()) {
1235 InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
1236 size_t depth = inline_info.GetDepth();
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001237 for (size_t d = 0; d < depth; ++d) {
1238 const char* tag = "";
David Srbecky052f8ca2018-04-26 15:42:54 +01001239 dex_pc = inline_info.GetDexPcAtDepth(d);
1240 if (inline_info.EncodesArtMethodAtDepth(d)) {
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001241 tag = "encoded ";
David Srbecky052f8ca2018-04-26 15:42:54 +01001242 caller = inline_info.GetArtMethodAtDepth(d);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001243 } else {
David Srbecky052f8ca2018-04-26 15:42:54 +01001244 uint32_t method_index = inline_info.GetMethodIndexAtDepth(method_info, d);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001245 if (dex_pc == static_cast<uint32_t>(-1)) {
1246 tag = "special ";
1247 CHECK_EQ(d + 1u, depth);
1248 caller = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
1249 CHECK_EQ(caller->GetDexMethodIndex(), method_index);
1250 } else {
1251 ObjPtr<mirror::DexCache> dex_cache = caller->GetDexCache();
1252 ObjPtr<mirror::ClassLoader> class_loader = caller->GetClassLoader();
1253 caller = class_linker->LookupResolvedMethod(method_index, dex_cache, class_loader);
1254 CHECK(caller != nullptr);
1255 }
1256 }
1257 LOG(FATAL_WITHOUT_ABORT) << "Inlined method #" << d << ": " << tag << caller->PrettyMethod()
1258 << " dex pc: " << dex_pc
1259 << " dex file: " << caller->GetDexFile()->GetLocation()
1260 << " class table: "
Vladimir Marko606adb32018-04-05 14:49:24 +01001261 << class_linker->ClassTableForClassLoader(caller->GetClassLoader());
1262 DumpB74410240ClassData(caller->GetDeclaringClass());
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001263 LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(caller, dex_pc);
1264 }
1265 }
1266}
1267
Ian Rogers848871b2013-08-05 10:56:33 -07001268// Lazily resolve a method for quick. Called by stub code.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001269extern "C" const void* artQuickResolutionTrampoline(
1270 ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001271 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001272 // The resolution trampoline stashes the resolved method into the callee-save frame to transport
1273 // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
1274 // does not have the same stack layout as the callee-save method).
1275 ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
Ian Rogers848871b2013-08-05 10:56:33 -07001276 // Start new JNI local reference state
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001277 JNIEnvExt* env = self->GetJniEnv();
Ian Rogers848871b2013-08-05 10:56:33 -07001278 ScopedObjectAccessUnchecked soa(env);
1279 ScopedJniEnvLocalRefState env_state(env);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001280 const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");
Ian Rogers848871b2013-08-05 10:56:33 -07001281
1282 // Compute details about the called method (avoid GCs)
1283 ClassLinker* linker = Runtime::Current()->GetClassLinker();
Ian Rogers848871b2013-08-05 10:56:33 -07001284 InvokeType invoke_type;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001285 MethodReference called_method(nullptr, 0);
1286 const bool called_method_known_on_entry = !called->IsRuntimeMethod();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001287 ArtMethod* caller = nullptr;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001288 if (!called_method_known_on_entry) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +01001289 caller = QuickArgumentVisitor::GetCallingMethod(sp);
Ian Rogerse0a02da2014-12-02 14:10:53 -08001290 called_method.dex_file = caller->GetDexFile();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001291
1292 InvokeType stack_map_invoke_type;
1293 uint32_t stack_map_dex_method_idx;
1294 const bool found_stack_map = QuickArgumentVisitor::GetInvokeType(sp,
1295 &stack_map_invoke_type,
1296 &stack_map_dex_method_idx);
1297 // For debug builds, we make sure both of the paths are consistent by also looking at the dex
1298 // code.
1299 if (!found_stack_map || kIsDebugBuild) {
1300 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
David Sehr0225f8e2018-01-31 08:52:24 +00001301 CodeItemInstructionAccessor accessor(caller->DexInstructions());
Mathieu Chartier808c7a52017-12-15 11:19:33 -08001302 CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits());
1303 const Instruction& instr = accessor.InstructionAt(dex_pc);
Vladimir Markod7559b72017-09-28 13:50:37 +01001304 Instruction::Code instr_code = instr.Opcode();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001305 bool is_range;
1306 switch (instr_code) {
1307 case Instruction::INVOKE_DIRECT:
1308 invoke_type = kDirect;
1309 is_range = false;
1310 break;
1311 case Instruction::INVOKE_DIRECT_RANGE:
1312 invoke_type = kDirect;
1313 is_range = true;
1314 break;
1315 case Instruction::INVOKE_STATIC:
1316 invoke_type = kStatic;
1317 is_range = false;
1318 break;
1319 case Instruction::INVOKE_STATIC_RANGE:
1320 invoke_type = kStatic;
1321 is_range = true;
1322 break;
1323 case Instruction::INVOKE_SUPER:
1324 invoke_type = kSuper;
1325 is_range = false;
1326 break;
1327 case Instruction::INVOKE_SUPER_RANGE:
1328 invoke_type = kSuper;
1329 is_range = true;
1330 break;
1331 case Instruction::INVOKE_VIRTUAL:
1332 invoke_type = kVirtual;
1333 is_range = false;
1334 break;
1335 case Instruction::INVOKE_VIRTUAL_RANGE:
1336 invoke_type = kVirtual;
1337 is_range = true;
1338 break;
1339 case Instruction::INVOKE_INTERFACE:
1340 invoke_type = kInterface;
1341 is_range = false;
1342 break;
1343 case Instruction::INVOKE_INTERFACE_RANGE:
1344 invoke_type = kInterface;
1345 is_range = true;
1346 break;
1347 default:
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001348 DumpB74410240DebugData(sp);
Vladimir Markod7559b72017-09-28 13:50:37 +01001349 LOG(FATAL) << "Unexpected call into trampoline: " << instr.DumpString(nullptr);
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001350 UNREACHABLE();
1351 }
Vladimir Markod7559b72017-09-28 13:50:37 +01001352 called_method.index = (is_range) ? instr.VRegB_3rc() : instr.VRegB_35c();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001353 // Check that the invoke matches what we expected; note that this path only happens for debug
1354 // builds.
1355 if (found_stack_map) {
1356 DCHECK_EQ(stack_map_invoke_type, invoke_type);
1357 if (invoke_type != kSuper) {
1358 // Super may be sharpened.
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001359 DCHECK_EQ(stack_map_dex_method_idx, called_method.index)
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001360 << called_method.dex_file->PrettyMethod(stack_map_dex_method_idx) << " "
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001361 << called_method.PrettyMethod();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001362 }
1363 } else {
Andreas Gampe9e6dee22017-04-11 13:50:23 -07001364 VLOG(dex) << "Accessed dex file for invoke " << invoke_type << " "
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001365 << called_method.index;
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001366 }
1367 } else {
1368 invoke_type = stack_map_invoke_type;
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001369 called_method.index = stack_map_dex_method_idx;
Ian Rogers848871b2013-08-05 10:56:33 -07001370 }
Ian Rogers848871b2013-08-05 10:56:33 -07001371 } else {
1372 invoke_type = kStatic;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001373 called_method.dex_file = called->GetDexFile();
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001374 called_method.index = called->GetDexMethodIndex();
Ian Rogers848871b2013-08-05 10:56:33 -07001375 }
1376 uint32_t shorty_len;
1377 const char* shorty =
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001378 called_method.dex_file->GetMethodShorty(called_method.GetMethodId(), &shorty_len);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001379 RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
Ian Rogers848871b2013-08-05 10:56:33 -07001380 visitor.VisitArguments();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001381 self->EndAssertNoThreadSuspension(old_cause);
Ian Rogerse0a02da2014-12-02 14:10:53 -08001382 const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
Ian Rogers848871b2013-08-05 10:56:33 -07001383 // Resolve method filling in dex cache.
Ian Rogerse0a02da2014-12-02 14:10:53 -08001384 if (!called_method_known_on_entry) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001385 StackHandleScope<1> hs(self);
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001386 mirror::Object* dummy = nullptr;
1387 HandleWrapper<mirror::Object> h_receiver(
1388 hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
Ian Rogerse0a02da2014-12-02 14:10:53 -08001389 DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
Vladimir Markoba118822017-06-12 15:41:56 +01001390 called = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001391 self, called_method.index, caller, invoke_type);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001392
1393 // Update .bss entry in oat file if any.
1394 if (called != nullptr && called_method.dex_file->GetOatDexFile() != nullptr) {
Vladimir Markof3c52b42017-11-17 17:32:12 +00001395 size_t bss_offset = IndexBssMappingLookup::GetBssOffset(
1396 called_method.dex_file->GetOatDexFile()->GetMethodBssMapping(),
1397 called_method.index,
1398 called_method.dex_file->NumMethodIds(),
1399 static_cast<size_t>(kRuntimePointerSize));
1400 if (bss_offset != IndexBssMappingLookup::npos) {
1401 DCHECK_ALIGNED(bss_offset, static_cast<size_t>(kRuntimePointerSize));
1402 const OatFile* oat_file = called_method.dex_file->GetOatDexFile()->GetOatFile();
1403 ArtMethod** method_entry = reinterpret_cast<ArtMethod**>(const_cast<uint8_t*>(
1404 oat_file->BssBegin() + bss_offset));
1405 DCHECK_GE(method_entry, oat_file->GetBssMethods().data());
1406 DCHECK_LT(method_entry,
1407 oat_file->GetBssMethods().data() + oat_file->GetBssMethods().size());
1408 *method_entry = called;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001409 }
1410 }
Ian Rogers848871b2013-08-05 10:56:33 -07001411 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001412 const void* code = nullptr;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001413 if (LIKELY(!self->IsExceptionPending())) {
Ian Rogers848871b2013-08-05 10:56:33 -07001414 // Incompatible class change should have been handled in resolve method.
Brian Carlstrom2ec65202014-03-03 15:16:37 -08001415 CHECK(!called->CheckIncompatibleClassChange(invoke_type))
David Sehr709b0702016-10-13 09:12:37 -07001416 << called->PrettyMethod() << " " << invoke_type;
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001417 if (virtual_or_interface || invoke_type == kSuper) {
1418 // Refine called method based on receiver for kVirtual/kInterface, and
1419 // caller for kSuper.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001420 ArtMethod* orig_called = called;
Mathieu Chartier55871bf2014-02-27 10:24:50 -08001421 if (invoke_type == kVirtual) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001422 CHECK(receiver != nullptr) << invoke_type;
Andreas Gampe542451c2016-07-26 09:02:02 -07001423 called = receiver->GetClass()->FindVirtualMethodForVirtual(called, kRuntimePointerSize);
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001424 } else if (invoke_type == kInterface) {
1425 CHECK(receiver != nullptr) << invoke_type;
Andreas Gampe542451c2016-07-26 09:02:02 -07001426 called = receiver->GetClass()->FindVirtualMethodForInterface(called, kRuntimePointerSize);
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001427 } else {
1428 DCHECK_EQ(invoke_type, kSuper);
1429 CHECK(caller != nullptr) << invoke_type;
Vladimir Markoba118822017-06-12 15:41:56 +01001430 ObjPtr<mirror::Class> ref_class = linker->LookupResolvedType(
Vladimir Marko666ee3d2017-12-11 18:37:36 +00001431 caller->GetDexFile()->GetMethodId(called_method.index).class_idx_, caller);
Alex Lightfedd91d2016-01-07 14:49:16 -08001432 if (ref_class->IsInterface()) {
Andreas Gampe542451c2016-07-26 09:02:02 -07001433 called = ref_class->FindVirtualMethodForInterfaceSuper(called, kRuntimePointerSize);
Alex Lightfedd91d2016-01-07 14:49:16 -08001434 } else {
1435 called = caller->GetDeclaringClass()->GetSuperClass()->GetVTableEntry(
Andreas Gampe542451c2016-07-26 09:02:02 -07001436 called->GetMethodIndex(), kRuntimePointerSize);
Alex Lightfedd91d2016-01-07 14:49:16 -08001437 }
Mathieu Chartier55871bf2014-02-27 10:24:50 -08001438 }
Mingyao Yangf4867782014-05-05 11:55:02 -07001439
David Sehr709b0702016-10-13 09:12:37 -07001440 CHECK(called != nullptr) << orig_called->PrettyMethod() << " "
1441 << mirror::Object::PrettyTypeOf(receiver) << " "
Mingyao Yangf4867782014-05-05 11:55:02 -07001442 << invoke_type << " " << orig_called->GetVtableIndex();
Ian Rogers83883d72013-10-21 21:07:24 -07001443 }
Daniel Mihalyieb076692014-08-22 17:33:31 +02001444
Ian Rogers848871b2013-08-05 10:56:33 -07001445 // Ensure that the called method's class is initialized.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001446 StackHandleScope<1> hs(soa.Self());
1447 Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
Ian Rogers7b078e82014-09-10 14:44:24 -07001448 linker->EnsureInitialized(soa.Self(), called_class, true, true);
Ian Rogers848871b2013-08-05 10:56:33 -07001449 if (LIKELY(called_class->IsInitialized())) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001450 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1451 // If we are single-stepping or the called method is deoptimized (by a
1452 // breakpoint, for example), then we have to execute the called method
1453 // with the interpreter.
1454 code = GetQuickToInterpreterBridge();
1455 } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
1456 // If the caller is deoptimized (by a breakpoint, for example), we have to
1457 // continue its execution with interpreter when returning from the called
1458 // method. Because we do not want to execute the called method with the
1459 // interpreter, we wrap its execution into the instrumentation stubs.
1460 // When the called method returns, it will execute the instrumentation
1461 // exit hook that will determine the need of the interpreter with a call
1462 // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
1463 // it is needed.
1464 code = GetQuickInstrumentationEntryPoint();
1465 } else {
1466 code = called->GetEntryPointFromQuickCompiledCode();
1467 }
Ian Rogers848871b2013-08-05 10:56:33 -07001468 } else if (called_class->IsInitializing()) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001469 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1470 // If we are single-stepping or the called method is deoptimized (by a
1471 // breakpoint, for example), then we have to execute the called method
1472 // with the interpreter.
1473 code = GetQuickToInterpreterBridge();
1474 } else if (invoke_type == kStatic) {
Alex Lightfc49fec2018-01-16 22:28:36 +00001475 // Class is still initializing, go to oat and grab code (trampoline must be left in place
1476 // until class is initialized to stop races between threads).
1477 code = linker->GetQuickOatCodeFor(called);
Ian Rogers848871b2013-08-05 10:56:33 -07001478 } else {
1479 // No trampoline for non-static methods.
Ian Rogersef7d42f2014-01-06 12:55:46 -08001480 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -07001481 }
1482 } else {
1483 DCHECK(called_class->IsErroneous());
1484 }
1485 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001486 CHECK_EQ(code == nullptr, self->IsExceptionPending());
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001487 // Fixup any locally saved objects that may have moved during a GC.
1488 visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -07001489 // Place the called method in the callee-save frame so it is passed as the first argument to the quick method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001490 *sp = called;
1491
Ian Rogers848871b2013-08-05 10:56:33 -07001492 return code;
1493}
1494
Andreas Gampec147b002014-03-06 18:11:06 -08001495/*
1496 * This class uses a couple of observations to unite the different calling conventions through
1497 * a few constants.
1498 *
1499 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
1500 * possible alignment.
1501 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
1502 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
1503 * when we have to split things
1504 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
1505 * and we can use Int handling directly.
1506 * 4) Only 64b architectures widen, and their stack is aligned 8B anyway, so no padding code is
1507 * necessary when widening. Also, widening of Ints will take place implicitly, and the
1508 * extension should be compatible with Aarch64, which mandates copying the available bits
1509 * into LSB and leaving the rest unspecified.
1510 * 5) Aligning longs and doubles is necessary only on 32b arm and mips, and it's the same in registers and on
1511 * the stack.
1512 * 6) All supported architectures are little-endian.
1513 *
1514 *
1515 * Actual work is supposed to be done in a delegate of the template type. The interface is as
1516 * follows:
1517 *
1518 * void PushGpr(uintptr_t): Add a value for the next GPR
1519 *
1520 * void PushFpr4(float): Add a value for the next FPR of size 32b. Only called when we need
1521 * padding, that is, when the architecture is 32b but aligns 64b values.
1522 *
1523 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
1524 * split this if necessary. The current state will have aligned, if
1525 * necessary.
1526 *
1527 * void PushStack(uintptr_t): Push a value to the stack.
1528 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001529 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be called with nullptr,
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001530 * as this might be important for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -08001531 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001532 * entry in the HandleScope (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -08001533 *
1534 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001535template<class T> class BuildNativeCallFrameStateMachine {
Andreas Gampec147b002014-03-06 18:11:06 -08001536 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001537#if defined(__arm__)
1538 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -08001539 static constexpr bool kNativeSoftFloatAbi = true;
1540 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001541 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1542
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001543 static constexpr size_t kRegistersNeededForLong = 2;
1544 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001545 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001546 static constexpr bool kMultiFPRegistersWidened = false;
1547 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001548 static constexpr bool kAlignLongOnStack = true;
1549 static constexpr bool kAlignDoubleOnStack = true;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001550#elif defined(__aarch64__)
1551 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1552 static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs.
1553 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1554
1555 static constexpr size_t kRegistersNeededForLong = 1;
1556 static constexpr size_t kRegistersNeededForDouble = 1;
1557 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001558 static constexpr bool kMultiFPRegistersWidened = false;
1559 static constexpr bool kMultiGPRegistersWidened = false;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001560 static constexpr bool kAlignLongOnStack = false;
1561 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001562#elif defined(__mips__) && !defined(__LP64__)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001563 static constexpr bool kNativeSoftFloatAbi = true; // Arguments are marshalled as on a soft-float ABI.
Douglas Leung735b8552014-10-31 12:21:40 -07001564 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs.
1565 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001566
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001567 static constexpr size_t kRegistersNeededForLong = 2;
1568 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001569 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001570 static constexpr bool kMultiFPRegistersWidened = true;
1571 static constexpr bool kMultiGPRegistersWidened = false;
Douglas Leung735b8552014-10-31 12:21:40 -07001572 static constexpr bool kAlignLongOnStack = true;
1573 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001574#elif defined(__mips__) && defined(__LP64__)
1575 // Let the code prepare GPRs only and we will load the FPRs with the same data.
1576 static constexpr bool kNativeSoftFloatAbi = true;
1577 static constexpr size_t kNumNativeGprArgs = 8;
1578 static constexpr size_t kNumNativeFprArgs = 0;
1579
1580 static constexpr size_t kRegistersNeededForLong = 1;
1581 static constexpr size_t kRegistersNeededForDouble = 1;
1582 static constexpr bool kMultiRegistersAligned = false;
1583 static constexpr bool kMultiFPRegistersWidened = false;
1584 static constexpr bool kMultiGPRegistersWidened = true;
1585 static constexpr bool kAlignLongOnStack = false;
1586 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001587#elif defined(__i386__)
1588 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -08001589 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001590 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs; all arguments go on the stack.
1591 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1592
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001593 static constexpr size_t kRegistersNeededForLong = 2;
1594 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001595 static constexpr bool kMultiRegistersAligned = false; // x86 does not use argument registers anyway.
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001596 static constexpr bool kMultiFPRegistersWidened = false;
1597 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001598 static constexpr bool kAlignLongOnStack = false;
1599 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001600#elif defined(__x86_64__)
1601 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1602 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
1603 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1604
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001605 static constexpr size_t kRegistersNeededForLong = 1;
1606 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -08001607 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001608 static constexpr bool kMultiFPRegistersWidened = false;
1609 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001610 static constexpr bool kAlignLongOnStack = false;
1611 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001612#else
1613#error "Unsupported architecture"
1614#endif
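  // Worked example under the constants above (a sketch for orientation; not used by the code):
  // for a native method taking (JNIEnv*, jclass, jint, jlong, jdouble), x86-64 consumes
  // 4 GPRs (env, class handle, int, long) and 1 FPR (double) with nothing on the stack, while
  // 32-bit arm consumes 3 GPRs (env, class handle, int) and then places the long and the
  // soft-float double on the stack as four 32-bit slots (the last GPR is left unused because
  // the long would need an aligned register pair).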
1615
Andreas Gampec147b002014-03-06 18:11:06 -08001616 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001617 explicit BuildNativeCallFrameStateMachine(T* delegate)
1618 : gpr_index_(kNumNativeGprArgs),
1619 fpr_index_(kNumNativeFprArgs),
1620 stack_entries_(0),
1621 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -08001622 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
1623 // the next register is even; counting down is just to make the compiler happy...
Andreas Gampe575e78c2014-11-03 23:41:03 -08001624 static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
1625 static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
Andreas Gampec147b002014-03-06 18:11:06 -08001626 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001627
Andreas Gampec200a4a2014-06-16 18:39:09 -07001628 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001629
Ian Rogers1428dce2014-10-21 15:02:15 -07001630 bool HavePointerGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001631 return gpr_index_ > 0;
1632 }
1633
Andreas Gampec200a4a2014-06-16 18:39:09 -07001634 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -08001635 if (HavePointerGpr()) {
1636 gpr_index_--;
1637 PushGpr(reinterpret_cast<uintptr_t>(val));
1638 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001639 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -08001640 PushStack(reinterpret_cast<uintptr_t>(val));
1641 gpr_index_ = 0;
1642 }
1643 }
1644
Ian Rogers1428dce2014-10-21 15:02:15 -07001645 bool HaveHandleScopeGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001646 return gpr_index_ > 0;
1647 }
1648
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001649 void AdvanceHandleScope(mirror::Object* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001650 uintptr_t handle = PushHandle(ptr);
1651 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -08001652 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001653 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001654 } else {
1655 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001656 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001657 gpr_index_ = 0;
1658 }
1659 }
1660
Ian Rogers1428dce2014-10-21 15:02:15 -07001661 bool HaveIntGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001662 return gpr_index_ > 0;
1663 }
1664
1665 void AdvanceInt(uint32_t val) {
1666 if (HaveIntGpr()) {
1667 gpr_index_--;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001668 if (kMultiGPRegistersWidened) {
1669 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001670 PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001671 } else {
1672 PushGpr(val);
1673 }
Andreas Gampec147b002014-03-06 18:11:06 -08001674 } else {
1675 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001676 if (kMultiGPRegistersWidened) {
1677 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001678 PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001679 } else {
1680 PushStack(val);
1681 }
Andreas Gampec147b002014-03-06 18:11:06 -08001682 gpr_index_ = 0;
1683 }
1684 }
1685
Ian Rogers1428dce2014-10-21 15:02:15 -07001686 bool HaveLongGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001687 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
1688 }
1689
Ian Rogers1428dce2014-10-21 15:02:15 -07001690 bool LongGprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001691 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1692 kAlignLongOnStack && // and when it needs alignment
1693 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1694 }
1695
Ian Rogers1428dce2014-10-21 15:02:15 -07001696 bool LongStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001697 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1698 kAlignLongOnStack && // and when it needs 8B alignment
1699 (stack_entries_ & 1) == 1; // counter is odd
1700 }
1701
1702 void AdvanceLong(uint64_t val) {
1703 if (HaveLongGpr()) {
1704 if (LongGprNeedsPadding()) {
1705 PushGpr(0);
1706 gpr_index_--;
1707 }
1708 if (kRegistersNeededForLong == 1) {
1709 PushGpr(static_cast<uintptr_t>(val));
1710 } else {
1711 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1712 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1713 }
1714 gpr_index_ -= kRegistersNeededForLong;
1715 } else {
1716 if (LongStackNeedsPadding()) {
1717 PushStack(0);
1718 stack_entries_++;
1719 }
1720 if (kRegistersNeededForLong == 1) {
1721 PushStack(static_cast<uintptr_t>(val));
1722 stack_entries_++;
1723 } else {
1724 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1725 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1726 stack_entries_ += 2;
1727 }
1728 gpr_index_ = 0;
1729 }
1730 }
1731
Ian Rogers1428dce2014-10-21 15:02:15 -07001732 bool HaveFloatFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001733 return fpr_index_ > 0;
1734 }
1735
Andreas Gampec147b002014-03-06 18:11:06 -08001736 void AdvanceFloat(float val) {
1737 if (kNativeSoftFloatAbi) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001738 AdvanceInt(bit_cast<uint32_t, float>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001739 } else {
1740 if (HaveFloatFpr()) {
1741 fpr_index_--;
1742 if (kRegistersNeededForDouble == 1) {
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001743 if (kMultiFPRegistersWidened) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001744 PushFpr8(bit_cast<uint64_t, double>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001745 } else {
1746 // No widening, just use the bits.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001747 PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001748 }
1749 } else {
1750 PushFpr4(val);
1751 }
1752 } else {
1753 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001754 if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
Andreas Gampec147b002014-03-06 18:11:06 -08001755 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001756 // Note: We need to jump through those hoops to make the compiler happy.
1757 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001758 PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001759 } else {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001760 PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001761 }
1762 fpr_index_ = 0;
1763 }
1764 }
1765 }
1766
Ian Rogers1428dce2014-10-21 15:02:15 -07001767 bool HaveDoubleFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001768 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1769 }
1770
Ian Rogers1428dce2014-10-21 15:02:15 -07001771 bool DoubleFprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001772 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1773 kAlignDoubleOnStack && // and when it needs alignment
1774 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1775 }
1776
Ian Rogers1428dce2014-10-21 15:02:15 -07001777 bool DoubleStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001778 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1779 kAlignDoubleOnStack && // and when it needs 8B alignment
1780 (stack_entries_ & 1) == 1; // counter is odd
1781 }
1782
1783 void AdvanceDouble(uint64_t val) {
1784 if (kNativeSoftFloatAbi) {
1785 AdvanceLong(val);
1786 } else {
1787 if (HaveDoubleFpr()) {
1788 if (DoubleFprNeedsPadding()) {
1789 PushFpr4(0);
1790 fpr_index_--;
1791 }
1792 PushFpr8(val);
1793 fpr_index_ -= kRegistersNeededForDouble;
1794 } else {
1795 if (DoubleStackNeedsPadding()) {
1796 PushStack(0);
1797 stack_entries_++;
1798 }
1799 if (kRegistersNeededForDouble == 1) {
1800 PushStack(static_cast<uintptr_t>(val));
1801 stack_entries_++;
1802 } else {
1803 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1804 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1805 stack_entries_ += 2;
1806 }
1807 fpr_index_ = 0;
1808 }
1809 }
1810 }
1811
Ian Rogers1428dce2014-10-21 15:02:15 -07001812 uint32_t GetStackEntries() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001813 return stack_entries_;
1814 }
1815
Ian Rogers1428dce2014-10-21 15:02:15 -07001816 uint32_t GetNumberOfUsedGprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001817 return kNumNativeGprArgs - gpr_index_;
1818 }
1819
Ian Rogers1428dce2014-10-21 15:02:15 -07001820 uint32_t GetNumberOfUsedFprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001821 return kNumNativeFprArgs - fpr_index_;
1822 }
1823
1824 private:
1825 void PushGpr(uintptr_t val) {
1826 delegate_->PushGpr(val);
1827 }
1828 void PushFpr4(float val) {
1829 delegate_->PushFpr4(val);
1830 }
1831 void PushFpr8(uint64_t val) {
1832 delegate_->PushFpr8(val);
1833 }
1834 void PushStack(uintptr_t val) {
1835 delegate_->PushStack(val);
1836 }
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001837 uintptr_t PushHandle(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001838 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001839 }
1840
1841 uint32_t gpr_index_; // Number of free GPRs
1842 uint32_t fpr_index_; // Number of free FPRs
1843 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1844 // extended
Ian Rogers1428dce2014-10-21 15:02:15 -07001845 T* const delegate_; // What Push implementation gets called
Andreas Gampec147b002014-03-06 18:11:06 -08001846};
1847
Andreas Gampec200a4a2014-06-16 18:39:09 -07001848// Computes the sizes of the register-argument areas and the call-stack area. Handling of references can be extended
1849// in subclasses.
1850//
1851// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1852// them with handles.
1853class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001854 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001855 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1856
1857 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001858
Ian Rogers1428dce2014-10-21 15:02:15 -07001859 uint32_t GetStackSize() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001860 return num_stack_entries_ * sizeof(uintptr_t);
1861 }
1862
Ian Rogers1428dce2014-10-21 15:02:15 -07001863 uint8_t* LayoutCallStack(uint8_t* sp8) const {
Andreas Gampec147b002014-03-06 18:11:06 -08001864 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001865 // Align by kStackAlignment.
1866 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001867 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001868 }
1869
Ian Rogers1428dce2014-10-21 15:02:15 -07001870 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
1871 const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001872 // Assumption is OK right now, as we have soft-float arm
1873 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1874 sp8 -= fregs * sizeof(uintptr_t);
1875 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1876 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1877 sp8 -= iregs * sizeof(uintptr_t);
1878 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1879 return sp8;
1880 }
Andreas Gampec147b002014-03-06 18:11:06 -08001881
Andreas Gampec200a4a2014-06-16 18:39:09 -07001882 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
Ian Rogers1428dce2014-10-21 15:02:15 -07001883 uint32_t** start_fpr) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001884 // Native call stack.
1885 sp8 = LayoutCallStack(sp8);
1886 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001887
Andreas Gampec200a4a2014-06-16 18:39:09 -07001888 // Put fprs and gprs below.
1889 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001890
Andreas Gampec200a4a2014-06-16 18:39:09 -07001891 // Return the new bottom.
1892 return sp8;
1893 }
1894
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001895 virtual void WalkHeader(
1896 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001897 REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001898 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07001899
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001900 void Walk(const char* shorty, uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001901 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1902
1903 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001904
1905 for (uint32_t i = 1; i < shorty_len; ++i) {
1906 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1907 switch (cur_type_) {
1908 case Primitive::kPrimNot:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001909 // TODO: fix abuse of mirror types.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001910 sm.AdvanceHandleScope(
1911 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001912 break;
1913
1914 case Primitive::kPrimBoolean:
1915 case Primitive::kPrimByte:
1916 case Primitive::kPrimChar:
1917 case Primitive::kPrimShort:
1918 case Primitive::kPrimInt:
1919 sm.AdvanceInt(0);
1920 break;
1921 case Primitive::kPrimFloat:
1922 sm.AdvanceFloat(0);
1923 break;
1924 case Primitive::kPrimDouble:
1925 sm.AdvanceDouble(0);
1926 break;
1927 case Primitive::kPrimLong:
1928 sm.AdvanceLong(0);
1929 break;
1930 default:
1931 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001932 UNREACHABLE();
Andreas Gampec147b002014-03-06 18:11:06 -08001933 }
1934 }
1935
Ian Rogers1428dce2014-10-21 15:02:15 -07001936 num_stack_entries_ = sm.GetStackEntries();
Andreas Gampec147b002014-03-06 18:11:06 -08001937 }
1938
1939 void PushGpr(uintptr_t /* val */) {
1940 // not optimizing registers, yet
1941 }
1942
1943 void PushFpr4(float /* val */) {
1944 // not optimizing registers, yet
1945 }
1946
1947 void PushFpr8(uint64_t /* val */) {
1948 // not optimizing registers, yet
1949 }
1950
1951 void PushStack(uintptr_t /* val */) {
1952 // Counting is already done in the state machine.
1953 }
1954
Andreas Gampec200a4a2014-06-16 18:39:09 -07001955 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08001956 return reinterpret_cast<uintptr_t>(nullptr);
1957 }
1958
Andreas Gampec200a4a2014-06-16 18:39:09 -07001959 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08001960 uint32_t num_stack_entries_;
1961};
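
// A minimal sizing sketch (illustrative; ComputeNativeCallFrameSize is concrete, so this works on
// its own, while the subclass below additionally lays out the HandleScope and method pointer):
//
//   ComputeNativeCallFrameSize fsc;
//   fsc.Walk(shorty, shorty_len);                  // Simulate the call to count stack-passed words.
//   uint32_t out_args_bytes = fsc.GetStackSize();  // Bytes needed for stack-passed arguments.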
1962
Andreas Gampec200a4a2014-06-16 18:39:09 -07001963class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001964 public:
Igor Murashkin06a04e02016-09-13 15:57:37 -07001965 explicit ComputeGenericJniFrameSize(bool critical_native)
1966 : num_handle_scope_references_(0), critical_native_(critical_native) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001967
Andreas Gampec200a4a2014-06-16 18:39:09 -07001968 // Lays out the callee-save frame. Assumes that the still-incomplete frame corresponding to RefsAndArgs
1969 // is at *m = sp. Will update *m to point to the bottom of the new frame.
1970 //
1971 // Note: assumes Walk() has been run before, so that num_handle_scope_references_ is known.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001972 void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001973 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001974 ArtMethod* method = **m;
1975
Andreas Gampe542451c2016-07-26 09:02:02 -07001976 DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001977
Andreas Gampec200a4a2014-06-16 18:39:09 -07001978 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
1979
1980 // First, fix up the layout of the callee-save frame.
1981 // We have to squeeze in the HandleScope, and relocate the method pointer.
1982
1983 // "Free" the slot for the method.
Ian Rogers13735952014-10-08 12:43:28 -07001984 sp8 += sizeof(void*); // In the callee-save frame we use a full pointer.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001985
1986 // Under the callee saves, put the handle scope and the new method stack reference.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001987 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07001988 size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001989
1990 sp8 -= scope_and_method;
1991 // Align by kStackAlignment.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001992 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001993
Mathieu Chartiere401d142015-04-22 13:56:20 -07001994 uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
Ian Rogers59c07062014-10-10 13:03:39 -07001995 *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
1996 num_handle_scope_references_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001997
1998 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1999 uint8_t* method_pointer = sp8;
Mathieu Chartiere401d142015-04-22 13:56:20 -07002000 auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
2001 *new_method_ref = method;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002002 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002003 }
2004
Andreas Gampec200a4a2014-06-16 18:39:09 -07002005 // Adds space for the cookie. Note: may leave stack unaligned.
Ian Rogers1428dce2014-10-21 15:02:15 -07002006 void LayoutCookie(uint8_t** sp) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002007 // Reference cookie and padding
2008 *sp -= 8;
Mathieu Chartier0cd81352014-05-22 16:48:55 -07002009 }
2010
Andreas Gampec200a4a2014-06-16 18:39:09 -07002011 // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
2012 // Returns the new bottom. Note: this may be unaligned.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002013 uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002014 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002015 // First, fix up the layout of the callee-save frame.
2016 // We have to squeeze in the HandleScope, and relocate the method pointer.
Ian Rogers59c07062014-10-10 13:03:39 -07002017 LayoutCalleeSaveFrame(self, m, sp, handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002018
2019 // The bottom of the callee-save frame is now where the method is, *m.
2020 uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);
2021
2022 // Add space for cookie.
2023 LayoutCookie(&sp8);
2024
2025 return sp8;
2026 }
2027
2028 // WARNING: After this, *sp won't be pointing to the method anymore!
Mathieu Chartiere401d142015-04-22 13:56:20 -07002029 uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
2030 HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
2031 uint32_t** start_fpr)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002032 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002033 Walk(shorty, shorty_len);
2034
2035 // JNI part.
Ian Rogers59c07062014-10-10 13:03:39 -07002036 uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002037
2038 sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);
2039
2040 // Return the new bottom.
2041 return sp8;
2042 }
2043
2044 uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;
2045
2046 // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
2047 void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002048 REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002049
2050 private:
2051 uint32_t num_handle_scope_references_;
Igor Murashkin06a04e02016-09-13 15:57:37 -07002052 const bool critical_native_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002053};
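// Illustrative sketch, not part of the runtime: LayoutCalleeSaveFrame above makes room for the
// HandleScope and the relocated method pointer by moving a byte pointer down and then rounding it
// down to the stack alignment, so the reserved region can only grow. The helper below is a
// hypothetical stand-in for that arithmetic (the real code uses kStackAlignment and RoundDown);
// the alignment is assumed to be a power of two.
static inline uint8_t* SketchAlignDown(uint8_t* p, uintptr_t alignment) {
  return reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(p) & ~(alignment - 1u));
}
// For example, reserving 24 bytes below address 0x1003 with 16-byte alignment gives
// SketchAlignDown(0x1003 - 24, 16) == 0xfe0, which still covers the requested 24 bytes.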
2054
2055uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
2056 num_handle_scope_references_++;
2057 return reinterpret_cast<uintptr_t>(nullptr);
2058}
2059
2060void ComputeGenericJniFrameSize::WalkHeader(
2061 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
Igor Murashkin06a04e02016-09-13 15:57:37 -07002062 // First 2 parameters are always excluded for @CriticalNative.
2063 if (UNLIKELY(critical_native_)) {
2064 return;
2065 }
2066
Andreas Gampec200a4a2014-06-16 18:39:09 -07002067 // JNIEnv
2068 sm->AdvancePointer(nullptr);
2069
2070 // Class object or this as first argument
2071 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
2072}
2073
2074// Class to push values to three separate regions. Used to fill the native call part. Adheres to
 2075// the template requirements of BuildNativeCallFrameStateMachine.
2076class FillNativeCall {
2077 public:
2078 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
2079 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
2080
2081 virtual ~FillNativeCall() {}
2082
2083 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
2084 cur_gpr_reg_ = gpr_regs;
2085 cur_fpr_reg_ = fpr_regs;
2086 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08002087 }
2088
2089 void PushGpr(uintptr_t val) {
2090 *cur_gpr_reg_ = val;
2091 cur_gpr_reg_++;
2092 }
2093
2094 void PushFpr4(float val) {
2095 *cur_fpr_reg_ = val;
2096 cur_fpr_reg_++;
2097 }
2098
2099 void PushFpr8(uint64_t val) {
2100 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
2101 *tmp = val;
2102 cur_fpr_reg_ += 2;
2103 }
2104
2105 void PushStack(uintptr_t val) {
2106 *cur_stack_arg_ = val;
2107 cur_stack_arg_++;
2108 }
2109
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002110 virtual uintptr_t PushHandle(mirror::Object*) REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002111 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002112 UNREACHABLE();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002113 }
2114
2115 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002116 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002117 uint32_t* cur_fpr_reg_;
2118 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002119};
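// Illustrative sketch, not part of the runtime: PushFpr8 above stores a 64-bit value into the
// 32-bit FPR array and advances the cursor by two slots, so one double consumes a register pair.
// The hypothetical helper below shows the same idea using std::memcpy (assumed available via
// <cstring>) instead of the reinterpret_cast used above; both write the same eight bytes.
static inline void SketchPushFpr8(uint32_t** cur_fpr, uint64_t val) {
  std::memcpy(*cur_fpr, &val, sizeof(val));  // Fills two consecutive 32-bit slots.
  *cur_fpr += 2;                             // Advance past the register pair.
}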
Andreas Gampec147b002014-03-06 18:11:06 -08002120
Andreas Gampec200a4a2014-06-16 18:39:09 -07002121// Visits arguments on the stack, placing them into a region lower down the stack for the benefit
2122// of transitioning into native code.
2123class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
2124 public:
Igor Murashkin06a04e02016-09-13 15:57:37 -07002125 BuildGenericJniFrameVisitor(Thread* self,
2126 bool is_static,
2127 bool critical_native,
2128 const char* shorty,
2129 uint32_t shorty_len,
Mathieu Chartiere401d142015-04-22 13:56:20 -07002130 ArtMethod*** sp)
Andreas Gampec200a4a2014-06-16 18:39:09 -07002131 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
Igor Murashkin06a04e02016-09-13 15:57:37 -07002132 jni_call_(nullptr, nullptr, nullptr, nullptr, critical_native),
2133 sm_(&jni_call_) {
2134 ComputeGenericJniFrameSize fsc(critical_native);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002135 uintptr_t* start_gpr_reg;
2136 uint32_t* start_fpr_reg;
2137 uintptr_t* start_stack_arg;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07002138 bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
Ian Rogers59c07062014-10-10 13:03:39 -07002139 &handle_scope_,
2140 &start_stack_arg,
Andreas Gampec200a4a2014-06-16 18:39:09 -07002141 &start_gpr_reg, &start_fpr_reg);
2142
Andreas Gampec200a4a2014-06-16 18:39:09 -07002143 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
2144
Igor Murashkin06a04e02016-09-13 15:57:37 -07002145 // First 2 parameters are always excluded for CriticalNative methods.
2146 if (LIKELY(!critical_native)) {
 2147      // The JNI environment is always the first argument.
2148 sm_.AdvancePointer(self->GetJniEnv());
Andreas Gampec200a4a2014-06-16 18:39:09 -07002149
Igor Murashkin06a04e02016-09-13 15:57:37 -07002150 if (is_static) {
2151 sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
2152 } // else "this" reference is already handled by QuickArgumentVisitor.
Andreas Gampec200a4a2014-06-16 18:39:09 -07002153 }
2154 }
2155
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002156 void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002157
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002158 void FinalizeHandleScope(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002159
Vladimir Markof39745e2016-01-26 12:16:55 +00002160 StackReference<mirror::Object>* GetFirstHandleScopeEntry() {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002161 return handle_scope_->GetHandle(0).GetReference();
2162 }
2163
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002164 jobject GetFirstHandleScopeJObject() const REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002165 return handle_scope_->GetHandle(0).ToJObject();
2166 }
2167
Ian Rogers1428dce2014-10-21 15:02:15 -07002168 void* GetBottomOfUsedArea() const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002169 return bottom_of_used_area_;
2170 }
2171
2172 private:
2173 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
2174 class FillJniCall FINAL : public FillNativeCall {
2175 public:
2176 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
Igor Murashkin06a04e02016-09-13 15:57:37 -07002177 HandleScope* handle_scope, bool critical_native)
2178 : FillNativeCall(gpr_regs, fpr_regs, stack_args),
2179 handle_scope_(handle_scope),
2180 cur_entry_(0),
2181 critical_native_(critical_native) {}
Andreas Gampec200a4a2014-06-16 18:39:09 -07002182
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002183 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002184
2185 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
2186 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
2187 handle_scope_ = scope;
2188 cur_entry_ = 0U;
2189 }
2190
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002191 void ResetRemainingScopeSlots() REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002192 // Initialize padding entries.
2193 size_t expected_slots = handle_scope_->NumberOfReferences();
2194 while (cur_entry_ < expected_slots) {
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07002195 handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002196 }
Igor Murashkin06a04e02016-09-13 15:57:37 -07002197
2198 if (!critical_native_) {
 2199        // Non-critical natives have at least the declaring class (jclass, for statics) or this (jobject).
2200 DCHECK_NE(cur_entry_, 0U);
2201 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07002202 }
2203
Mathieu Chartier1432a5b2016-10-04 15:41:42 -07002204 bool CriticalNative() const {
2205 return critical_native_;
2206 }
2207
Andreas Gampec200a4a2014-06-16 18:39:09 -07002208 private:
2209 HandleScope* handle_scope_;
2210 size_t cur_entry_;
Igor Murashkin06a04e02016-09-13 15:57:37 -07002211 const bool critical_native_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002212 };
2213
2214 HandleScope* handle_scope_;
2215 FillJniCall jni_call_;
2216 void* bottom_of_used_area_;
2217
2218 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002219
2220 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
2221};
2222
Andreas Gampec200a4a2014-06-16 18:39:09 -07002223uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
2224 uintptr_t tmp;
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07002225 MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002226 h.Assign(ref);
2227 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
2228 cur_entry_++;
2229 return tmp;
2230}
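// Illustrative sketch, not part of the runtime: PushHandle above stores the reference into a
// handle-scope slot and passes the address of that slot (as a jobject) to the native code, so a
// moving GC can update the slot without invalidating what the native code holds. A minimal
// stand-in with a plain slot array (all names below are hypothetical):
struct SketchScope {
  void* slots[8];
  size_t next = 0;
  // Returns an indirect reference: a pointer to the slot rather than the object itself.
  void** Push(void* obj) {
    slots[next] = obj;
    return &slots[next++];
  }
};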
2231
Ian Rogers9758f792014-03-13 09:02:55 -07002232void BuildGenericJniFrameVisitor::Visit() {
2233 Primitive::Type type = GetParamPrimitiveType();
2234 switch (type) {
2235 case Primitive::kPrimLong: {
2236 jlong long_arg;
2237 if (IsSplitLongOrDouble()) {
2238 long_arg = ReadSplitLongParam();
2239 } else {
2240 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
2241 }
2242 sm_.AdvanceLong(long_arg);
2243 break;
2244 }
2245 case Primitive::kPrimDouble: {
2246 uint64_t double_arg;
2247 if (IsSplitLongOrDouble()) {
 2248        // Read into a 64-bit integer so that we don't convert to a double.
2249 double_arg = ReadSplitLongParam();
2250 } else {
2251 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
2252 }
2253 sm_.AdvanceDouble(double_arg);
2254 break;
2255 }
2256 case Primitive::kPrimNot: {
2257 StackReference<mirror::Object>* stack_ref =
2258 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002259 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07002260 break;
2261 }
2262 case Primitive::kPrimFloat:
2263 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
2264 break;
2265 case Primitive::kPrimBoolean: // Fall-through.
2266 case Primitive::kPrimByte: // Fall-through.
2267 case Primitive::kPrimChar: // Fall-through.
2268 case Primitive::kPrimShort: // Fall-through.
2269 case Primitive::kPrimInt: // Fall-through.
2270 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
2271 break;
2272 case Primitive::kPrimVoid:
2273 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07002274 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -07002275 }
2276}
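// Illustrative sketch, not part of the runtime: the visitor above is driven by the method's
// shorty, one character per parameter ('J' = long, 'D' = double, 'F' = float, 'L' = reference,
// and the remaining primitive kinds are passed as ints). A hypothetical classifier showing the
// same dispatch on a single shorty character:
enum class SketchArgKind { kIntLike, kLong, kFloat, kDouble, kReference };
static inline SketchArgKind SketchClassifyShortyChar(char c) {
  switch (c) {
    case 'J': return SketchArgKind::kLong;
    case 'D': return SketchArgKind::kDouble;
    case 'F': return SketchArgKind::kFloat;
    case 'L': return SketchArgKind::kReference;
    default:  return SketchArgKind::kIntLike;  // 'Z', 'B', 'C', 'S', 'I' all use integer registers.
  }
}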
2277
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002278void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07002279 // Clear out rest of the scope.
2280 jni_call_.ResetRemainingScopeSlots();
Mathieu Chartier1432a5b2016-10-04 15:41:42 -07002281 if (!jni_call_.CriticalNative()) {
2282 // Install HandleScope.
2283 self->PushHandleScope(handle_scope_);
2284 }
Ian Rogers9758f792014-03-13 09:02:55 -07002285}
2286
Ian Rogers04c31d22014-07-07 21:44:06 -07002287#if defined(__arm__) || defined(__aarch64__)
Alex Lightd78ddec2017-04-18 15:20:38 -07002288extern "C" const void* artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07002289#else
Alex Lightd78ddec2017-04-18 15:20:38 -07002290extern "C" const void* artFindNativeMethod(Thread* self);
Ian Rogers04c31d22014-07-07 21:44:06 -07002291#endif
Andreas Gampe90546832014-03-12 18:07:19 -07002292
Igor Murashkin06a04e02016-09-13 15:57:37 -07002293static uint64_t artQuickGenericJniEndJNIRef(Thread* self,
2294 uint32_t cookie,
2295 bool fast_native ATTRIBUTE_UNUSED,
2296 jobject l,
2297 jobject lock) {
2298 // TODO: add entrypoints for @FastNative returning objects.
Andreas Gampead615172014-04-04 16:20:13 -07002299 if (lock != nullptr) {
2300 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
2301 } else {
2302 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
2303 }
2304}
2305
Igor Murashkin06a04e02016-09-13 15:57:37 -07002306static void artQuickGenericJniEndJNINonRef(Thread* self,
2307 uint32_t cookie,
2308 bool fast_native,
2309 jobject lock) {
Andreas Gampead615172014-04-04 16:20:13 -07002310 if (lock != nullptr) {
2311 JniMethodEndSynchronized(cookie, lock, self);
Igor Murashkin06a04e02016-09-13 15:57:37 -07002312 // Ignore "fast_native" here because synchronized functions aren't very fast.
Andreas Gampead615172014-04-04 16:20:13 -07002313 } else {
Igor Murashkin06a04e02016-09-13 15:57:37 -07002314 if (UNLIKELY(fast_native)) {
2315 JniMethodFastEnd(cookie, self);
2316 } else {
2317 JniMethodEnd(cookie, self);
2318 }
Andreas Gampead615172014-04-04 16:20:13 -07002319 }
2320}
2321
Andreas Gampec147b002014-03-06 18:11:06 -08002322/*
2323 * Initializes an alloca region assumed to be directly below sp for a native call:
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002324 * Create a HandleScope and a native call stack, and fill a mini stack with values to be pushed to registers.
Andreas Gampec147b002014-03-06 18:11:06 -08002325 * The final element on the stack is a pointer to the native code.
2326 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07002327 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002328 * We need to fix this, as the handle scope needs to go into the callee-save frame.
Andreas Gampe36fea8d2014-03-10 13:37:40 -07002329 *
Andreas Gampec147b002014-03-06 18:11:06 -08002330 * The return of this function denotes:
 2331 * 1) On success, a two-word value holding the bottom of the used alloca area and the native code to call.
 2332 * 2) On failure (an exception is pending), the two-word failure value.
2333 */
Mathieu Chartiere401d142015-04-22 13:56:20 -07002334extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002335 REQUIRES_SHARED(Locks::mutator_lock_) {
Vladimir Markob0a6aee2017-10-27 10:34:04 +01002336 // Note: We cannot walk the stack properly until fixed up below.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002337 ArtMethod* called = *sp;
David Sehr709b0702016-10-13 09:12:37 -07002338 DCHECK(called->IsNative()) << called->PrettyMethod(true);
Vladimir Marko2196c652017-11-30 16:16:07 +00002339 Runtime* runtime = Runtime::Current();
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07002340 uint32_t shorty_len = 0;
2341 const char* shorty = called->GetShorty(&shorty_len);
Vladimir Markob0a6aee2017-10-27 10:34:04 +01002342 bool critical_native = called->IsCriticalNative();
2343 bool fast_native = called->IsFastNative();
Igor Murashkin06a04e02016-09-13 15:57:37 -07002344 bool normal_native = !critical_native && !fast_native;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002345
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002346 // Run the visitor and update sp.
Igor Murashkin06a04e02016-09-13 15:57:37 -07002347 BuildGenericJniFrameVisitor visitor(self,
2348 called->IsStatic(),
2349 critical_native,
2350 shorty,
2351 shorty_len,
2352 &sp);
Mathieu Chartierbe08cf52016-09-13 13:41:24 -07002353 {
2354 ScopedAssertNoThreadSuspension sants(__FUNCTION__);
2355 visitor.VisitArguments();
2356 // FinalizeHandleScope pushes the handle scope on the thread.
2357 visitor.FinalizeHandleScope(self);
2358 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002359
Vladimir Markob0a6aee2017-10-27 10:34:04 +01002360 // Fix up managed-stack things in Thread. After this we can walk the stack.
Vladimir Marko2196c652017-11-30 16:16:07 +00002361 self->SetTopOfStackTagged(sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002362
Ian Rogerse0dcd462014-03-08 15:21:04 -08002363 self->VerifyStack();
2364
Vladimir Markof8655b32018-03-21 17:53:56 +00002365 // We can now walk the stack if needed by JIT GC from MethodEntered() for JIT-on-first-use.
2366 jit::Jit* jit = runtime->GetJit();
2367 if (jit != nullptr) {
2368 jit->MethodEntered(self, called);
2369 }
2370
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002371 uint32_t cookie;
Igor Murashkin06a04e02016-09-13 15:57:37 -07002372 uint32_t* sp32;
2373 // Skip calling JniMethodStart for @CriticalNative.
2374 if (LIKELY(!critical_native)) {
2375 // Start JNI, save the cookie.
2376 if (called->IsSynchronized()) {
 2377      DCHECK(normal_native) << " synchronized methods are not supported with @FastNative";
2378 cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
2379 if (self->IsExceptionPending()) {
2380 self->PopHandleScope();
2381 // A negative value denotes an error.
2382 return GetTwoWordFailureValue();
2383 }
2384 } else {
2385 if (fast_native) {
2386 cookie = JniMethodFastStart(self);
2387 } else {
2388 DCHECK(normal_native);
2389 cookie = JniMethodStart(self);
2390 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002391 }
Igor Murashkin06a04e02016-09-13 15:57:37 -07002392 sp32 = reinterpret_cast<uint32_t*>(sp);
2393 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002394 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002395
Andreas Gampe90546832014-03-12 18:07:19 -07002396 // Retrieve the stored native code.
Alex Lightd78ddec2017-04-18 15:20:38 -07002397 void const* nativeCode = called->GetEntryPointFromJni();
Andreas Gampe90546832014-03-12 18:07:19 -07002398
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07002399 // There are two cases for the content of nativeCode:
2400 // 1) Pointer to the native function.
2401 // 2) Pointer to the trampoline for native code binding.
2402 // In the second case, we need to execute the binding and continue with the actual native function
2403 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07002404 DCHECK(nativeCode != nullptr);
2405 if (nativeCode == GetJniDlsymLookupStub()) {
Ian Rogers04c31d22014-07-07 21:44:06 -07002406#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07002407 nativeCode = artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07002408#else
2409 nativeCode = artFindNativeMethod(self);
2410#endif
Andreas Gampe90546832014-03-12 18:07:19 -07002411
2412 if (nativeCode == nullptr) {
2413 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07002414
Igor Murashkin06a04e02016-09-13 15:57:37 -07002415 // @CriticalNative calls do not need to call back into JniMethodEnd.
2416 if (LIKELY(!critical_native)) {
2417 // End JNI, as the assembly will move to deliver the exception.
2418 jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
2419 if (shorty[0] == 'L') {
2420 artQuickGenericJniEndJNIRef(self, cookie, fast_native, nullptr, lock);
2421 } else {
2422 artQuickGenericJniEndJNINonRef(self, cookie, fast_native, lock);
2423 }
Andreas Gampead615172014-04-04 16:20:13 -07002424 }
2425
Andreas Gampec200a4a2014-06-16 18:39:09 -07002426 return GetTwoWordFailureValue();
Andreas Gampe90546832014-03-12 18:07:19 -07002427 }
2428 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002429 }
2430
Alexey Frunze1b8464d2016-11-12 17:22:05 -08002431#if defined(__mips__) && !defined(__LP64__)
2432 // On MIPS32 if the first two arguments are floating-point, we need to know their types
2433 // so that art_quick_generic_jni_trampoline can correctly extract them from the stack
2434 // and load into floating-point registers.
2435 // Possible arrangements of first two floating-point arguments on the stack (32-bit FPU
2436 // view):
2437 // (1)
2438 // | DOUBLE | DOUBLE | other args, if any
2439 // | F12 | F13 | F14 | F15 |
2440 // | SP+0 | SP+4 | SP+8 | SP+12 | SP+16
2441 // (2)
2442 // | DOUBLE | FLOAT | (PAD) | other args, if any
2443 // | F12 | F13 | F14 | |
2444 // | SP+0 | SP+4 | SP+8 | SP+12 | SP+16
2445 // (3)
2446 // | FLOAT | (PAD) | DOUBLE | other args, if any
2447 // | F12 | | F14 | F15 |
2448 // | SP+0 | SP+4 | SP+8 | SP+12 | SP+16
2449 // (4)
2450 // | FLOAT | FLOAT | other args, if any
2451 // | F12 | F14 |
2452 // | SP+0 | SP+4 | SP+8
2453 // As you can see, only the last case (4) is special. In all others we can just
2454 // load F12/F13 and F14/F15 in the same manner.
2455 // Set bit 0 of the native code address to 1 in this case (valid code addresses
2456 // are always a multiple of 4 on MIPS32, so we have 2 spare bits available).
2457 if (nativeCode != nullptr &&
2458 shorty != nullptr &&
2459 shorty_len >= 3 &&
2460 shorty[1] == 'F' &&
2461 shorty[2] == 'F') {
2462 nativeCode = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(nativeCode) | 1);
2463 }
2464#endif
2465
Andreas Gampec200a4a2014-06-16 18:39:09 -07002466 // Return native code addr(lo) and bottom of alloca address(hi).
2467 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
2468 reinterpret_cast<uintptr_t>(nativeCode));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002469}
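// Illustrative sketch, not part of the runtime: the MIPS32 special case above smuggles one bit of
// information in the low bit of the native code pointer, relying on valid code addresses being a
// multiple of four. Hypothetical tag/untag helpers showing the idea; how the assembly stub
// consumes the bit is outside this sketch.
static inline const void* SketchTagPointer(const void* p) {
  return reinterpret_cast<const void*>(reinterpret_cast<uintptr_t>(p) | 1u);
}
static inline bool SketchPointerIsTagged(const void* p) {
  return (reinterpret_cast<uintptr_t>(p) & 1u) != 0u;
}
static inline const void* SketchUntagPointer(const void* p) {
  return reinterpret_cast<const void*>(reinterpret_cast<uintptr_t>(p) & ~static_cast<uintptr_t>(1u));
}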
2470
Hiroshi Yamauchia23b4682015-09-28 17:47:32 -07002471// Defined in quick_jni_entrypoints.cc.
2472extern uint64_t GenericJniMethodEnd(Thread* self, uint32_t saved_local_ref_cookie,
2473 jvalue result, uint64_t result_f, ArtMethod* called,
2474 HandleScope* handle_scope);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002475/*
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002476 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002477 * unlocking.
2478 */
Hiroshi Yamauchia23b4682015-09-28 17:47:32 -07002479extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
2480 jvalue result,
2481 uint64_t result_f) {
 2482  // We're here just back from a native call. We don't hold the shared mutator lock at this point,
 2483  // and won't until GoToRunnable() is called later in GenericJniMethodEnd(). Accessing objects or doing
2484 // anything that requires a mutator lock before that would cause problems as GC may have the
2485 // exclusive mutator lock and may be moving objects, etc.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002486 ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
Vladimir Marko2196c652017-11-30 16:16:07 +00002487 DCHECK(self->GetManagedStack()->GetTopQuickFrameTag());
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002488 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002489 ArtMethod* called = *sp;
Ian Rogerse0dcd462014-03-08 15:21:04 -08002490 uint32_t cookie = *(sp32 - 1);
Hiroshi Yamauchia23b4682015-09-28 17:47:32 -07002491 HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp) + sizeof(*sp));
2492 return GenericJniMethodEnd(self, cookie, result, result_f, called, table);
Andreas Gampe2da88232014-02-27 12:26:20 -08002493}
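// Illustrative sketch, not part of the runtime: the JNI state cookie saved by the start
// trampoline lives in the 32-bit slot directly below the ArtMethod** frame pointer (the space
// reserved by LayoutCookie above), which is why both trampolines address it as *(sp32 - 1).
// Hypothetical helpers spelling that out:
static inline void SketchStoreJniCookie(void* sp, uint32_t cookie) {
  *(reinterpret_cast<uint32_t*>(sp) - 1) = cookie;
}
static inline uint32_t SketchLoadJniCookie(void* sp) {
  return *(reinterpret_cast<uint32_t*>(sp) - 1);
}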
2494
Andreas Gamped58342c2014-06-05 14:18:08 -07002495// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
2496// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07002497//
Andreas Gamped58342c2014-06-05 14:18:08 -07002498// It is valid to use this, as at the usage points here (returns from C functions) we assume that
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002499// we hold the mutator lock (see REQUIRES_SHARED(Locks::mutator_lock_) annotations).
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002500
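// Illustrative sketch, not part of the runtime: on a 32-bit target a "two word" result can be
// packed into a 64-bit integer that the ABI hands back in a register pair; the assembly caller
// then branches to one half and forwards the other. A hypothetical 32-bit packing helper (the
// real TwoWordReturn and GetTwoWordSuccessValue are defined per architecture elsewhere):
static inline uint64_t SketchPackTwoWords32(uint32_t lo, uint32_t hi) {
  return (static_cast<uint64_t>(hi) << 32) | lo;
}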
Vladimir Markof79aa7f2017-07-04 16:58:55 +01002501template <InvokeType type, bool access_check>
Mathieu Chartieref41db72016-10-25 15:08:01 -07002502static TwoWordReturn artInvokeCommon(uint32_t method_idx,
2503 ObjPtr<mirror::Object> this_object,
2504 Thread* self,
Mathieu Chartiere401d142015-04-22 13:56:20 -07002505 ArtMethod** sp) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002506 ScopedQuickEntrypointChecks sqec(self);
Andreas Gampe8228cdf2017-05-30 15:03:54 -07002507 DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
Mathieu Chartiere401d142015-04-22 13:56:20 -07002508 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
Vladimir Markof79aa7f2017-07-04 16:58:55 +01002509 ArtMethod* method = FindMethodFast<type, access_check>(method_idx, this_object, caller_method);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002510 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002511 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
2512 uint32_t shorty_len;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002513 const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002514 {
2515 // Remember the args in case a GC happens in FindMethodFromCode.
2516 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
2517 RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
2518 visitor.VisitArguments();
Mathieu Chartieref41db72016-10-25 15:08:01 -07002519 method = FindMethodFromCode<type, access_check>(method_idx,
2520 &this_object,
2521 caller_method,
Mathieu Chartier0cd81352014-05-22 16:48:55 -07002522 self);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002523 visitor.FixupReferences();
2524 }
2525
Ian Rogerse0a02da2014-12-02 14:10:53 -08002526 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002527 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07002528 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002529 }
2530 }
2531 DCHECK(!self->IsExceptionPending());
2532 const void* code = method->GetEntryPointFromQuickCompiledCode();
2533
2534 // When we return, the caller will branch to this address, so it had better not be 0!
David Sehr709b0702016-10-13 09:12:37 -07002535 DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
Andreas Gampec200a4a2014-06-16 18:39:09 -07002536 << " location: "
2537 << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07002538
Andreas Gamped58342c2014-06-05 14:18:08 -07002539 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
2540 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002541}
2542
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01002543// Explicit artInvokeCommon template function declarations to please analysis tool.
2544#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002545 template REQUIRES_SHARED(Locks::mutator_lock_) \
Mathieu Chartiere401d142015-04-22 13:56:20 -07002546 TwoWordReturn artInvokeCommon<type, access_check>( \
Mathieu Chartieref41db72016-10-25 15:08:01 -07002547      uint32_t method_idx, ObjPtr<mirror::Object> this_object, Thread* self, ArtMethod** sp)
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01002548
2549EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
2550EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
2551EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
2552EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
2553EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
2554EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
2555EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
2556EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
2557EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
2558EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
2559#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
2560
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002561// See comments in runtime_support_asm.S
Andreas Gampec200a4a2014-06-16 18:39:09 -07002562extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
Mathieu Chartiere401d142015-04-22 13:56:20 -07002563 uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002564 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +01002565 return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002566}
2567
Andreas Gampec200a4a2014-06-16 18:39:09 -07002568extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
Mathieu Chartiere401d142015-04-22 13:56:20 -07002569 uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002570 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +01002571 return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002572}
2573
Andreas Gampec200a4a2014-06-16 18:39:09 -07002574extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
Mathieu Chartieref41db72016-10-25 15:08:01 -07002575 uint32_t method_idx,
2576 mirror::Object* this_object ATTRIBUTE_UNUSED,
2577 Thread* self,
2578 ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
 2579  // For static calls, this_object is not required and may be random garbage. Don't pass it down so
 2580  // that it doesn't trip the ObjPtr alignment check.
2581 return artInvokeCommon<kStatic, true>(method_idx, nullptr, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002582}
2583
Andreas Gampec200a4a2014-06-16 18:39:09 -07002584extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
Mathieu Chartiere401d142015-04-22 13:56:20 -07002585 uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002586 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +01002587 return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002588}
2589
Andreas Gampec200a4a2014-06-16 18:39:09 -07002590extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
Mathieu Chartiere401d142015-04-22 13:56:20 -07002591 uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002592 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +01002593 return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002594}
2595
Vladimir Marko07bfbac2017-07-06 14:55:02 +01002596// Helper function for art_quick_imt_conflict_trampoline to look up the interface method.
2597extern "C" ArtMethod* artLookupResolvedMethod(uint32_t method_index, ArtMethod* referrer)
2598 REQUIRES_SHARED(Locks::mutator_lock_) {
2599 ScopedAssertNoThreadSuspension ants(__FUNCTION__);
2600 DCHECK(!referrer->IsProxyMethod());
2601 ArtMethod* result = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
2602 method_index, referrer->GetDexCache(), referrer->GetClassLoader());
2603 DCHECK(result == nullptr ||
2604 result->GetDeclaringClass()->IsInterface() ||
2605 result->GetDeclaringClass() ==
2606 WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object))
2607 << result->PrettyMethod();
2608 return result;
2609}
2610
Jeff Hao5667f562017-02-27 19:32:01 -08002611// Determine the target of interface dispatch. This object is known non-null. The interface method
 2612// is the method returned by the dex cache in the conflict trampoline and may be null if unresolved.
2613extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method,
Mathieu Chartieref41db72016-10-25 15:08:01 -07002614 mirror::Object* raw_this_object,
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002615 Thread* self,
2616 ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002617 REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002618 ScopedQuickEntrypointChecks sqec(self);
Vladimir Marko302f69c2017-07-25 15:27:15 +01002619 StackHandleScope<2> hs(self);
2620 Handle<mirror::Object> this_object = hs.NewHandle(raw_this_object);
2621 Handle<mirror::Class> cls = hs.NewHandle(this_object->GetClass());
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002622
Nicolas Geoffray5bf7bac2016-07-06 14:18:23 +00002623 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002624 ArtMethod* method = nullptr;
Andreas Gampe542451c2016-07-26 09:02:02 -07002625 ImTable* imt = cls->GetImt(kRuntimePointerSize);
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002626
Vladimir Marko302f69c2017-07-25 15:27:15 +01002627 if (UNLIKELY(interface_method == nullptr)) {
Vladimir Marko07bfbac2017-07-06 14:55:02 +01002628 // The interface method is unresolved, so resolve it in the dex file of the caller.
Jeff Hao5667f562017-02-27 19:32:01 -08002629 // Fetch the dex_method_idx of the target interface method from the caller.
2630 uint32_t dex_method_idx;
2631 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
Mathieu Chartier808c7a52017-12-15 11:19:33 -08002632 const Instruction& instr = caller_method->DexInstructions().InstructionAt(dex_pc);
Vladimir Markod7559b72017-09-28 13:50:37 +01002633 Instruction::Code instr_code = instr.Opcode();
Jeff Hao5667f562017-02-27 19:32:01 -08002634 DCHECK(instr_code == Instruction::INVOKE_INTERFACE ||
2635 instr_code == Instruction::INVOKE_INTERFACE_RANGE)
Vladimir Markod7559b72017-09-28 13:50:37 +01002636 << "Unexpected call into interface trampoline: " << instr.DumpString(nullptr);
Jeff Hao5667f562017-02-27 19:32:01 -08002637 if (instr_code == Instruction::INVOKE_INTERFACE) {
Vladimir Markod7559b72017-09-28 13:50:37 +01002638 dex_method_idx = instr.VRegB_35c();
Jeff Hao5667f562017-02-27 19:32:01 -08002639 } else {
2640 DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
Vladimir Markod7559b72017-09-28 13:50:37 +01002641 dex_method_idx = instr.VRegB_3rc();
Jeff Hao5667f562017-02-27 19:32:01 -08002642 }
2643
Vladimir Marko302f69c2017-07-25 15:27:15 +01002644 const DexFile& dex_file = caller_method->GetDeclaringClass()->GetDexFile();
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002645 uint32_t shorty_len;
Vladimir Marko302f69c2017-07-25 15:27:15 +01002646 const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(dex_method_idx),
2647 &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002648 {
Vladimir Marko302f69c2017-07-25 15:27:15 +01002649 // Remember the args in case a GC happens in ClassLinker::ResolveMethod().
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002650 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
2651 RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
2652 visitor.VisitArguments();
Vladimir Marko302f69c2017-07-25 15:27:15 +01002653 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2654 interface_method = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
2655 self, dex_method_idx, caller_method, kInterface);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002656 visitor.FixupReferences();
2657 }
2658
Vladimir Marko302f69c2017-07-25 15:27:15 +01002659 if (UNLIKELY(interface_method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002660 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07002661 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002662 }
Vladimir Marko302f69c2017-07-25 15:27:15 +01002663 }
2664
2665 DCHECK(!interface_method->IsRuntimeMethod());
2666 // Look whether we have a match in the ImtConflictTable.
2667 uint32_t imt_index = ImTable::GetImtIndex(interface_method);
2668 ArtMethod* conflict_method = imt->Get(imt_index, kRuntimePointerSize);
2669 if (LIKELY(conflict_method->IsRuntimeMethod())) {
2670 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
2671 DCHECK(current_table != nullptr);
2672 method = current_table->Lookup(interface_method, kRuntimePointerSize);
2673 } else {
2674 // It seems we aren't really a conflict method!
2675 if (kIsDebugBuild) {
2676 ArtMethod* m = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
2677 CHECK_EQ(conflict_method, m)
2678 << interface_method->PrettyMethod() << " / " << conflict_method->PrettyMethod() << " / "
2679 << " / " << ArtMethod::PrettyMethod(m) << " / " << cls->PrettyClass();
2680 }
2681 method = conflict_method;
2682 }
2683 if (method != nullptr) {
2684 return GetTwoWordSuccessValue(
2685 reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCode()),
2686 reinterpret_cast<uintptr_t>(method));
2687 }
2688
2689 // No match, use the IfTable.
2690 method = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
2691 if (UNLIKELY(method == nullptr)) {
2692 ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
2693 interface_method, this_object.Get(), caller_method);
2694 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002695 }
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002696
2697 // We arrive here if we have found an implementation, and it is not in the ImtConflictTable.
2698 // We create a new table with the new pair { interface_method, method }.
Vladimir Marko302f69c2017-07-25 15:27:15 +01002699 DCHECK(conflict_method->IsRuntimeMethod());
2700 ArtMethod* new_conflict_method = Runtime::Current()->GetClassLinker()->AddMethodToConflictTable(
2701 cls.Get(),
2702 conflict_method,
2703 interface_method,
2704 method,
2705 /*force_new_conflict_method*/false);
2706 if (new_conflict_method != conflict_method) {
2707 // Update the IMT if we create a new conflict method. No fence needed here, as the
2708 // data is consistent.
2709 imt->Set(imt_index,
2710 new_conflict_method,
2711 kRuntimePointerSize);
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002712 }
2713
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002714 const void* code = method->GetEntryPointFromQuickCompiledCode();
2715
2716 // When we return, the caller will branch to this address, so it had better not be 0!
David Sehr709b0702016-10-13 09:12:37 -07002717 DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
Andreas Gampec200a4a2014-06-16 18:39:09 -07002718 << " location: " << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07002719
Andreas Gamped58342c2014-06-05 14:18:08 -07002720 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
2721 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002722}
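// Illustrative sketch, not part of the runtime: when several interface methods hash to the same
// IMT slot, that slot holds a runtime "conflict method" carrying a table of
// {interface method, implementation} pairs, which is searched before falling back to the IfTable
// as done above. A minimal stand-in with hypothetical types (the real table is ImtConflictTable):
struct SketchConflictEntry {
  const void* interface_method;
  const void* implementation;
};
static inline const void* SketchConflictLookup(const SketchConflictEntry* entries,
                                               size_t count,
                                               const void* interface_method) {
  for (size_t i = 0; i < count; ++i) {
    if (entries[i].interface_method == interface_method) {
      return entries[i].implementation;  // Hit: the caller branches straight to this code.
    }
  }
  return nullptr;  // Miss: resolve via the IfTable and possibly grow the table.
}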
2723
Orion Hodsonac141392017-01-13 11:53:47 +00002724// Returns the shorty type so the caller can determine how to put |result|
 2725// into the expected registers. The shorty type is statically known, so the
 2726// compiler could call different flavors of this code path depending on the
 2727// shorty type, though this would require a different entry point for
 2728// each type.
2729extern "C" uintptr_t artInvokePolymorphic(
2730 JValue* result,
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002731 mirror::Object* raw_receiver,
Orion Hodsonac141392017-01-13 11:53:47 +00002732 Thread* self,
2733 ArtMethod** sp)
2734 REQUIRES_SHARED(Locks::mutator_lock_) {
2735 ScopedQuickEntrypointChecks sqec(self);
Andreas Gampe8228cdf2017-05-30 15:03:54 -07002736 DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
Orion Hodsonac141392017-01-13 11:53:47 +00002737
2738 // Start new JNI local reference state
2739 JNIEnvExt* env = self->GetJniEnv();
2740 ScopedObjectAccessUnchecked soa(env);
2741 ScopedJniEnvLocalRefState env_state(env);
2742 const char* old_cause = self->StartAssertNoThreadSuspension("Making stack arguments safe.");
2743
2744 // From the instruction, get the |callsite_shorty| and expose arguments on the stack to the GC.
2745 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
2746 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
Mathieu Chartier73f21d42018-01-02 14:26:50 -08002747 const Instruction& inst = caller_method->DexInstructions().InstructionAt(dex_pc);
Vladimir Markod7559b72017-09-28 13:50:37 +01002748 DCHECK(inst.Opcode() == Instruction::INVOKE_POLYMORPHIC ||
2749 inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
Orion Hodson06d10a72018-05-14 08:53:38 +01002750 const dex::ProtoIndex proto_idx(inst.VRegH());
Vladimir Marko666ee3d2017-12-11 18:37:36 +00002751 const char* shorty = caller_method->GetDexFile()->GetShorty(proto_idx);
Orion Hodsonac141392017-01-13 11:53:47 +00002752 const size_t shorty_length = strlen(shorty);
2753 static const bool kMethodIsStatic = false; // invoke() and invokeExact() are not static.
2754 RememberForGcArgumentVisitor gc_visitor(sp, kMethodIsStatic, shorty, shorty_length, &soa);
Orion Hodsonfea84dd2017-01-16 13:52:20 +00002755 gc_visitor.VisitArguments();
Orion Hodsonac141392017-01-13 11:53:47 +00002756
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002757 // Wrap raw_receiver in a Handle for safety.
2758 StackHandleScope<3> hs(self);
2759 Handle<mirror::Object> receiver_handle(hs.NewHandle(raw_receiver));
2760 raw_receiver = nullptr;
Orion Hodsonac141392017-01-13 11:53:47 +00002761 self->EndAssertNoThreadSuspension(old_cause);
2762
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002763 // Resolve method.
Orion Hodsonac141392017-01-13 11:53:47 +00002764 ClassLinker* linker = Runtime::Current()->GetClassLinker();
Vladimir Markoba118822017-06-12 15:41:56 +01002765 ArtMethod* resolved_method = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
Vladimir Markod7559b72017-09-28 13:50:37 +01002766 self, inst.VRegB(), caller_method, kVirtual);
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002767
2768 if (UNLIKELY(receiver_handle.IsNull())) {
Orion Hodsonac141392017-01-13 11:53:47 +00002769 ThrowNullPointerExceptionForMethodAccess(resolved_method, InvokeType::kVirtual);
2770 return static_cast<uintptr_t>('V');
2771 }
2772
Orion Hodsone7732be2017-10-11 14:35:20 +01002773 Handle<mirror::MethodType> method_type(
2774 hs.NewHandle(linker->ResolveMethodType(self, proto_idx, caller_method)));
2775
Orion Hodsonac141392017-01-13 11:53:47 +00002776 // This implies we couldn't resolve one or more types in this method handle.
2777 if (UNLIKELY(method_type.IsNull())) {
2778 CHECK(self->IsExceptionPending());
2779 return static_cast<uintptr_t>('V');
2780 }
2781
Vladimir Markod7559b72017-09-28 13:50:37 +01002782 DCHECK_EQ(ArtMethod::NumArgRegisters(shorty) + 1u, (uint32_t)inst.VRegA());
Orion Hodsonac141392017-01-13 11:53:47 +00002783 DCHECK_EQ(resolved_method->IsStatic(), kMethodIsStatic);
2784
2785 // Fix references before constructing the shadow frame.
2786 gc_visitor.FixupReferences();
2787
2788 // Construct shadow frame placing arguments consecutively from |first_arg|.
Vladimir Markod7559b72017-09-28 13:50:37 +01002789 const bool is_range = (inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
2790 const size_t num_vregs = is_range ? inst.VRegA_4rcc() : inst.VRegA_45cc();
Orion Hodsonac141392017-01-13 11:53:47 +00002791 const size_t first_arg = 0;
2792 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
2793 CREATE_SHADOW_FRAME(num_vregs, /* link */ nullptr, resolved_method, dex_pc);
2794 ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
2795 ScopedStackedShadowFramePusher
2796 frame_pusher(self, shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
2797 BuildQuickShadowFrameVisitor shadow_frame_builder(sp,
2798 kMethodIsStatic,
2799 shorty,
2800 strlen(shorty),
2801 shadow_frame,
2802 first_arg);
2803 shadow_frame_builder.VisitArguments();
2804
2805 // Push a transition back into managed code onto the linked list in thread.
2806 ManagedStack fragment;
2807 self->PushManagedStackFragment(&fragment);
2808
2809 // Call DoInvokePolymorphic with |is_range| = true, as shadow frame has argument registers in
2810 // consecutive order.
Orion Hodson960d4f72017-11-10 15:32:38 +00002811 RangeInstructionOperands operands(first_arg + 1, num_vregs - 1);
Orion Hodson537a4fe2018-05-15 13:57:58 +01002812 Intrinsics intrinsic = static_cast<Intrinsics>(resolved_method->GetIntrinsic());
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002813 bool success = false;
Vladimir Markoc7aa87e2018-05-24 15:19:52 +01002814 if (resolved_method->GetDeclaringClass() == GetClassRoot<mirror::MethodHandle>(linker)) {
Orion Hodson537a4fe2018-05-15 13:57:58 +01002815 Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
2816 ObjPtr<mirror::MethodHandle>::DownCast(MakeObjPtr(receiver_handle.Get()))));
2817 if (intrinsic == Intrinsics::kMethodHandleInvokeExact) {
2818 success = MethodHandleInvokeExact(self,
2819 *shadow_frame,
2820 method_handle,
2821 method_type,
2822 &operands,
2823 result);
2824 } else {
2825 DCHECK_EQ(static_cast<uint32_t>(intrinsic),
2826 static_cast<uint32_t>(Intrinsics::kMethodHandleInvoke));
2827 success = MethodHandleInvoke(self,
2828 *shadow_frame,
2829 method_handle,
2830 method_type,
2831 &operands,
2832 result);
2833 }
2834 } else {
Vladimir Markoc7aa87e2018-05-24 15:19:52 +01002835 DCHECK_EQ(GetClassRoot<mirror::VarHandle>(linker), resolved_method->GetDeclaringClass());
Orion Hodson537a4fe2018-05-15 13:57:58 +01002836 Handle<mirror::VarHandle> var_handle(hs.NewHandle(
2837 ObjPtr<mirror::VarHandle>::DownCast(MakeObjPtr(receiver_handle.Get()))));
2838 mirror::VarHandle::AccessMode access_mode =
2839 mirror::VarHandle::GetAccessModeByIntrinsic(intrinsic);
2840 success = VarHandleInvokeAccessor(self,
Orion Hodson960d4f72017-11-10 15:32:38 +00002841 *shadow_frame,
Orion Hodson537a4fe2018-05-15 13:57:58 +01002842 var_handle,
Orion Hodson960d4f72017-11-10 15:32:38 +00002843 method_type,
Orion Hodson537a4fe2018-05-15 13:57:58 +01002844 access_mode,
Orion Hodson960d4f72017-11-10 15:32:38 +00002845 &operands,
2846 result);
Orion Hodsonac141392017-01-13 11:53:47 +00002847 }
Orion Hodson537a4fe2018-05-15 13:57:58 +01002848
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002849 DCHECK(success || self->IsExceptionPending());
Orion Hodsonac141392017-01-13 11:53:47 +00002850
2851 // Pop transition record.
2852 self->PopManagedStackFragment(fragment);
2853
2854 return static_cast<uintptr_t>(shorty[0]);
2855}
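// Illustrative sketch, not part of the runtime: the shorty character returned above tells the
// assembly stub where |result| must end up ('V' means there is nothing to move, 'F'/'D' live in
// floating-point registers, 'J'/'D' are 64 bits wide). Hypothetical predicates making that
// decision explicit:
static inline bool SketchResultUsesFpRegister(char shorty_char) {
  return shorty_char == 'F' || shorty_char == 'D';
}
static inline bool SketchResultIs64Bit(char shorty_char) {
  return shorty_char == 'J' || shorty_char == 'D';
}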
2856
Ian Rogers848871b2013-08-05 10:56:33 -07002857} // namespace art