/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "class_root.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "dex/method_reference.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "index_bss_mapping.h"
#include "instrumentation.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
#include "method_handles.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/var_handle.h"
#include "oat_file.h"
#include "oat_quick_method_header.h"
#include "quick_exception_handler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "var_handles.h"
#include "well_known_classes.h"

namespace art {

// Visits the arguments as saved to the stack by a CalleeSaveType::kSaveRefsAndArgs callee save
// frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      RuntimeCalleeSaveFrame::GetGpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      RuntimeCalleeSaveFrame::GetFpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset =
      RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveRefsAndArgs);
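  // All of these offsets are relative to the callee-save frame's own stack pointer: e.g. the
  // first spilled GPR argument lives at
  // reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset
  // (see the QuickArgumentVisitor constructor below).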
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = true;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = 16;
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
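  // Illustration: with 4-byte GPR spill slots on ARM, gpr_index 0 (R1) maps to offset 0,
  // gpr_index 1 (R2) to offset 4 and gpr_index 2 (R3) to offset 8 from the first GPR spill
  // location shown above.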
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | T1         |    arg5
  // | T0         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |
  // | F18        |    f_arg5
  // | F17        |
  // | F16        |    f_arg4
  // | F15        |
  // | F14        |    f_arg3
  // | F13        |
  // | F12        |    f_arg2
  // | F11        |
  // | F10        |    f_arg1
  // | F9         |
  // | F8         |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 5;   // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 12;  // 6 arguments passed in FPRs. Floats can be
                                                  // passed only in even numbered registers and
                                                  // each double occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |    f_arg7
  // | F18        |    f_arg6
  // | F17        |    f_arg5
  // | F16        |    f_arg4
  // | F15        |    f_arg3
  // | F14        |    f_arg2
  // | F13        |    f_arg1
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for MIPS64, when A0 is skipped, F12 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
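  // Unlike on the other architectures, the x86-64 argument GPRs are not spilled in argument
  // order: from the lowest address up, the spill area holds RCX, RDX, RBX, RBP, RSI, R8, R9
  // (see the diagram above), so the argument index must be permuted into a slot number below.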
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods, so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kSaveRefsAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static StackReference<mirror::Object>* GetProxyThisObjectReference(ArtMethod** sp)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address);
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, CalleeSaveType::kSaveRefsAndArgs);
  }

  static ArtMethod* GetOuterMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

  static uint32_t GetCallingDexPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    constexpr size_t callee_frame_size =
        RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);

    if (current_code->IsOptimized()) {
      CodeInfo code_info(current_code);
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset);
      DCHECK(stack_map.IsValid());
      BitTableRange<InlineInfo> inline_infos = code_info.GetInlineInfosOf(stack_map);
      if (!inline_infos.empty()) {
        return inline_infos.back().GetDexPc();
      } else {
        return stack_map.GetDexPc();
      }
    } else {
      return current_code->ToDexPc(*caller_sp, outer_pc);
    }
  }

  static bool GetInvokeType(ArtMethod** sp, InvokeType* invoke_type, uint32_t* dex_method_index)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    constexpr size_t callee_frame_size =
        RuntimeCalleeSaveFrame::GetFrameSize(CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    if (!current_code->IsOptimized()) {
      return false;
    }
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);
    CodeInfo code_info(current_code);
    MethodInfo method_info = current_code->GetOptimizedMethodInfo();
    InvokeInfo invoke(code_info.GetInvokeInfoForNativePcOffset(outer_pc_offset));
    if (invoke.IsValid()) {
      *invoke_type = static_cast<InvokeType>(invoke.GetInvokeType());
      *dex_method_index = invoke.GetMethodIndex(method_info);
      return true;
    }
    return false;
  }

  // For the given quick ref-and-args frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* return_address_spill =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_ReturnPcOffset;
    return *reinterpret_cast<uintptr_t*>(return_address_spill);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want to assume that counters (fpr_double_index_) are even if the
    // next register is even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }
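  // Note: ReadSplitLongParam() relies on the register half of a split 64-bit value having been
  // spilled next to the stack half (see the argument spill slots in the caller's frame in the
  // diagrams above), which makes the value readable as one contiguous 64-bit load.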

  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }
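  // With kGprFprLockstep (MIPS64), GPR and FPR indices advance together so that argument n
  // always claims both GPR n and FPR n; skipping one register therefore also skips its
  // counterpart (see the NOTE in the MIPS64 frame description above).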

  void VisitArguments() REQUIRES_SHARED(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the first method argument.
    // (b) Whatever the argument type, 'stack_index_' should be advanced on every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong &&
#if defined(__mips__) && !defined(__LP64__)
                (gpr_index_ == 0 || gpr_index_ == 2) &&
#else
                gpr_index_ == 0 &&
#endif
                kAlignPairRegister) {
              // Currently, this is only for ARM and MIPS, where we align long parameters with
              // even-numbered registers by skipping R1 (on ARM) or A1(A3) (on MIPS) and using
              // R2 (on ARM) or A2(T0) (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }
Andreas Gampec200a4a2014-06-16 18:39:09 -0700609 protected:
Ian Rogers848871b2013-08-05 10:56:33 -0700610 const bool is_static_;
611 const char* const shorty_;
612 const uint32_t shorty_len_;
Andreas Gampec200a4a2014-06-16 18:39:09 -0700613
614 private:
Ian Rogers13735952014-10-08 12:43:28 -0700615 uint8_t* const gpr_args_; // Address of GPR arguments in callee save frame.
616 uint8_t* const fpr_args_; // Address of FPR arguments in callee save frame.
617 uint8_t* const stack_args_; // Address of stack arguments in caller's frame.
Ian Rogers936b37f2014-02-14 00:52:24 -0800618 uint32_t gpr_index_; // Index into spilled GPRs.
Zheng Xu5667fdb2014-10-23 18:29:55 +0800619 // Index into spilled FPRs.
620 // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
621 // holds a higher register number.
622 uint32_t fpr_index_;
623 // Index into spilled FPRs for aligned double.
624 // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed in
625 // terms of singles, may be behind fpr_index.
626 uint32_t fpr_double_index_;
Ian Rogers936b37f2014-02-14 00:52:24 -0800627 uint32_t stack_index_; // Index into arguments on the stack.
628 // The current type of argument during VisitArguments.
629 Primitive::Type cur_type_;
Ian Rogers848871b2013-08-05 10:56:33 -0700630 // Does a 64bit parameter straddle the register and stack arguments?
631 bool is_split_long_or_double_;
632};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// makes it possible to use the QuickArgumentVisitor constants without moving all the code into
// its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObjectReference(sp)->AsMirrorPtr();
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
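  // Note: long and double already advanced cur_reg_ once in their switch case above; together
  // with the shared increment below they consume the two vregs that a wide value occupies.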
  ++cur_reg_;
}

// Don't inline. See b/65159206.
NO_INLINE
static void HandleDeoptimization(JValue* result,
                                 ArtMethod* method,
                                 ShadowFrame* deopt_frame,
                                 ManagedStack* fragment)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Coming from partial-fragment deopt.
  Thread* self = Thread::Current();
  if (kIsDebugBuild) {
    // Sanity-check: are the methods as expected? We check that the last shadow frame (the bottom
    // of the call-stack) corresponds to the called method.
    ShadowFrame* linked = deopt_frame;
    while (linked->GetLink() != nullptr) {
      linked = linked->GetLink();
    }
    CHECK_EQ(method, linked->GetMethod()) << method->PrettyMethod() << " "
        << ArtMethod::PrettyMethod(linked->GetMethod());
  }

  if (VLOG_IS_ON(deopt)) {
    // Print out the stack to verify that it was a partial-fragment deopt.
    LOG(INFO) << "Continue-ing from deopt. Stack is:";
    QuickExceptionHandler::DumpFramesWithType(self, true);
  }

  ObjPtr<mirror::Throwable> pending_exception;
  bool from_code = false;
  DeoptimizationMethodType method_type;
  self->PopDeoptimizationContext(/* out */ result,
                                 /* out */ &pending_exception,
                                 /* out */ &from_code,
                                 /* out */ &method_type);

  // Push a transition back into managed code onto the linked list in thread.
  self->PushManagedStackFragment(fragment);

  // Ensure that the stack is still in order.
  if (kIsDebugBuild) {
    class DummyStackVisitor : public StackVisitor {
     public:
      explicit DummyStackVisitor(Thread* self_in) REQUIRES_SHARED(Locks::mutator_lock_)
          : StackVisitor(self_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

      bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
        // Nothing to do here. In a debug build, SanityCheckFrame will do the work in the walking
        // logic. Just always say we want to continue.
        return true;
      }
    };
    DummyStackVisitor dsv(self);
    dsv.WalkStack();
  }

  // Restore the exception that was pending before deoptimization then interpret the
  // deoptimized frames.
  if (pending_exception != nullptr) {
    self->SetException(pending_exception);
  }
  interpreter::EnterInterpreterFromDeoptimize(self,
                                              deopt_frame,
                                              result,
                                              from_code,
                                              DeoptimizationMethodType::kDefault);
}

extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (UNLIKELY(!method->IsInvokable())) {
    method->ThrowInvocationTimeError();
    return 0;
  }

  JValue tmp_value;
  ShadowFrame* deopt_frame = self->PopStackedShadowFrame(
      StackedShadowFrameType::kDeoptimizationShadowFrame, false);
  ManagedStack fragment;

  DCHECK(!method->IsNative()) << method->PrettyMethod();
  uint32_t shorty_len = 0;
  ArtMethod* non_proxy_method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  DCHECK(non_proxy_method->GetCodeItem() != nullptr) << method->PrettyMethod();
  CodeItemDataAccessor accessor(non_proxy_method->DexInstructionData());
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);

  JValue result;

  if (UNLIKELY(deopt_frame != nullptr)) {
    HandleDeoptimization(&result, method, deopt_frame, &fragment);
  } else {
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    uint16_t num_regs = accessor.RegistersSize();
    // No last shadow coming from quick.
    ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
        CREATE_SHADOW_FRAME(num_regs, /* link */ nullptr, method, /* dex pc */ 0);
    ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
    size_t first_arg_reg = accessor.RegistersSize() - accessor.InsSize();
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending())
            << shadow_frame->GetMethod()->PrettyMethod();
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    result = interpreter::EnterInterpreterFromEntryPoint(self, accessor, shadow_frame);
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);

  // Request a stack deoptimization if needed.
  ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp);
  // If caller_pc is the instrumentation exit stub, the stub will check to see if deoptimization
  // should be done and it knows the real return pc.
  if (UNLIKELY(caller_pc != reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) &&
               Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
    if (!Runtime::Current()->IsAsyncDeoptimizeable(caller_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                   << caller->PrettyMethod();
    } else {
      // Push the context of the deoptimization stack so we can restore the return value and the
      // exception before executing the deoptimized frames.
      self->PushDeoptimizationContext(
          result,
          shorty[0] == 'L' || shorty[0] == '[',  /* class or array */
          self->GetException(),
          false /* from_code */,
          DeoptimizationMethodType::kDefault);

      // Set special exception to cause deoptimization.
      self->SetException(Thread::GetDeoptimizationException());
    }
  }

  // No need to restore the args since the method has already been run by the interpreter.
  return result.GetJ();
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(
    ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << proxy_method->PrettyMethod();
  DCHECK(receiver->GetClass()->IsProxyClass()) << proxy_method->PrettyMethod();
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ((*sp), proxy_method) << proxy_method->PrettyMethod();
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  CHECK(!non_proxy_method->IsStatic()) << proxy_method->PrettyMethod() << " "
                                       << non_proxy_method->PrettyMethod();
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(
      sp, /* is_static */ false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << proxy_method->PrettyMethod();
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  ArtMethod* interface_method = proxy_method->FindOverriddenMethod(kRuntimePointerSize);
  DCHECK(interface_method != nullptr) << proxy_method->PrettyMethod();
  DCHECK(!interface_method->IsProxyMethod()) << interface_method->PrettyMethod();
  self->EndAssertNoThreadSuspension(old_cause);
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  ObjPtr<mirror::Method> interface_reflect_method =
      mirror::Method::CreateFromArtMethod<kRuntimePointerSize, false>(soa.Self(), interface_method);
  if (interface_reflect_method == nullptr) {
    soa.Self()->AssertPendingOOMException();
    return 0;
  }
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_reflect_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations or instrumentation events.
  instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  if (instr->HasMethodEntryListeners()) {
    instr->MethodEnterEvent(soa.Self(),
                            soa.Decode<mirror::Object>(rcvr_jobj).Ptr(),
                            proxy_method,
                            0);
    if (soa.Self()->IsExceptionPending()) {
      instr->MethodUnwindEvent(self,
                               soa.Decode<mirror::Object>(rcvr_jobj).Ptr(),
                               proxy_method,
                               0);
      return 0;
    }
  }
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  if (soa.Self()->IsExceptionPending()) {
    if (instr->HasMethodUnwindListeners()) {
      instr->MethodUnwindEvent(self,
                               soa.Decode<mirror::Object>(rcvr_jobj).Ptr(),
                               proxy_method,
                               0);
    }
  } else if (instr->HasMethodExitListeners()) {
    instr->MethodExitEvent(self,
                           soa.Decode<mirror::Object>(rcvr_jobj).Ptr(),
                           proxy_method,
                           0,
                           result);
  }
  return result.GetJ();
}
983
Roland Levillainad0777d2018-02-12 20:00:18 +0000984// Visitor returning a reference argument at a given position in a Quick stack frame.
985// NOTE: Only used for testing purposes.
986class GetQuickReferenceArgumentAtVisitor FINAL : public QuickArgumentVisitor {
987 public:
988 GetQuickReferenceArgumentAtVisitor(ArtMethod** sp,
989 const char* shorty,
990 uint32_t shorty_len,
991 size_t arg_pos)
992 : QuickArgumentVisitor(sp, /* is_static */ false, shorty, shorty_len),
993 cur_pos_(0u),
994 arg_pos_(arg_pos),
995 ref_arg_(nullptr) {
996 CHECK_LT(arg_pos, shorty_len) << "Argument position greater than the number arguments";
997 }
998
999 void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
1000 if (cur_pos_ == arg_pos_) {
1001 Primitive::Type type = GetParamPrimitiveType();
1002 CHECK_EQ(type, Primitive::kPrimNot) << "Argument at searched position is not a reference";
1003 ref_arg_ = reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
1004 }
1005 ++cur_pos_;
1006 }
1007
1008 StackReference<mirror::Object>* GetReferenceArgument() {
1009 return ref_arg_;
1010 }
1011
1012 private:
1013 // The position of the currently visited argument.
1014 size_t cur_pos_;
1015 // The position of the searched argument.
1016 const size_t arg_pos_;
1017 // The reference argument, if found.
1018 StackReference<mirror::Object>* ref_arg_;
1019
1020 DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentAtVisitor);
1021};
1022
1023// Returns the reference argument at position `arg_pos` in the Quick stack frame at address `sp`.
1024// NOTE: Only used for testing purposes.
1025extern "C" StackReference<mirror::Object>* artQuickGetProxyReferenceArgumentAt(size_t arg_pos,
1026 ArtMethod** sp)
1027 REQUIRES_SHARED(Locks::mutator_lock_) {
1028 ArtMethod* proxy_method = *sp;
1029 ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
1030 CHECK(!non_proxy_method->IsStatic())
1031 << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod();
1032 uint32_t shorty_len = 0;
1033 const char* shorty = non_proxy_method->GetShorty(&shorty_len);
1034 GetQuickReferenceArgumentAtVisitor ref_arg_visitor(sp, shorty, shorty_len, arg_pos);
1035 ref_arg_visitor.VisitArguments();
1036 StackReference<mirror::Object>* ref_arg = ref_arg_visitor.GetReferenceArgument();
1037 return ref_arg;
1038}
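// A minimal usage sketch (hypothetical, testing only): assuming `sp` points at a proxy method
// frame whose argument at position 1 is a reference, a test could do
//
//   StackReference<mirror::Object>* ref = artQuickGetProxyReferenceArgumentAt(1u, sp);
//   CHECK(ref != nullptr);
//
// A non-reference position would trip the CHECKs in the visitor above.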
1039
1040// Visitor returning all the reference arguments in a Quick stack frame.
1041class GetQuickReferenceArgumentsVisitor FINAL : public QuickArgumentVisitor {
1042 public:
1043 GetQuickReferenceArgumentsVisitor(ArtMethod** sp,
1044 bool is_static,
1045 const char* shorty,
1046 uint32_t shorty_len)
1047 : QuickArgumentVisitor(sp, is_static, shorty, shorty_len) {}
1048
1049 void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
1050 Primitive::Type type = GetParamPrimitiveType();
1051 if (type == Primitive::kPrimNot) {
1052 StackReference<mirror::Object>* ref_arg =
1053 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
1054 ref_args_.push_back(ref_arg);
1055 }
1056 }
1057
1058 std::vector<StackReference<mirror::Object>*> GetReferenceArguments() {
1059 return ref_args_;
1060 }
1061
1062 private:
1063 // The reference arguments.
1064 std::vector<StackReference<mirror::Object>*> ref_args_;
1065
1066 DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentsVisitor);
1067};
1068
1069// Returns all reference arguments in the Quick stack frame at address `sp`.
1070std::vector<StackReference<mirror::Object>*> GetProxyReferenceArguments(ArtMethod** sp)
1071 REQUIRES_SHARED(Locks::mutator_lock_) {
1072 ArtMethod* proxy_method = *sp;
1073 ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
1074 CHECK(!non_proxy_method->IsStatic())
1075 << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod();
1076 uint32_t shorty_len = 0;
1077 const char* shorty = non_proxy_method->GetShorty(&shorty_len);
1078 GetQuickReferenceArgumentsVisitor ref_args_visitor(sp, /* is_static */ false, shorty, shorty_len);
1079 ref_args_visitor.VisitArguments();
1080 std::vector<StackReference<mirror::Object>*> ref_args = ref_args_visitor.GetReferenceArguments();
1081 return ref_args;
1082}
1083
Ian Rogers848871b2013-08-05 10:56:33 -07001084// Read object references held in arguments from quick frames and place them in JNI local references,
1085// so they don't get garbage collected.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001086class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
Ian Rogers848871b2013-08-05 10:56:33 -07001087 public:
Mathieu Chartiere401d142015-04-22 13:56:20 -07001088 RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
1089 uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
Andreas Gampec200a4a2014-06-16 18:39:09 -07001090 QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}
Ian Rogers848871b2013-08-05 10:56:33 -07001091
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001092 void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001093
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001094 void FixupReferences() REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers848871b2013-08-05 10:56:33 -07001095
1096 private:
Ian Rogers9758f792014-03-13 09:02:55 -07001097 ScopedObjectAccessUnchecked* const soa_;
Mathieu Chartier5275bcb2014-02-20 17:16:42 -08001098 // References which we must update when exiting in case the GC moved the objects.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001099 std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
1100
Mathieu Chartier590fee92013-09-13 13:46:47 -07001101 DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
Ian Rogers848871b2013-08-05 10:56:33 -07001102};
1103
Ian Rogers9758f792014-03-13 09:02:55 -07001104void RememberForGcArgumentVisitor::Visit() {
1105 if (IsParamAReference()) {
1106 StackReference<mirror::Object>* stack_ref =
1107 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
1108 jobject reference =
1109 soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
1110 references_.push_back(std::make_pair(reference, stack_ref));
1111 }
1112}
1113
1114void RememberForGcArgumentVisitor::FixupReferences() {
1115 // Fix up any references which may have changed.
1116 for (const auto& pair : references_) {
Mathieu Chartier1a5337f2016-10-13 13:48:23 -07001117 pair.second->Assign(soa_->Decode<mirror::Object>(pair.first));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001118 soa_->Env()->DeleteLocalRef(pair.first);
Ian Rogers9758f792014-03-13 09:02:55 -07001119 }
1120}
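// Typical lifecycle (sketch, mirroring the uses below): pin the arguments as JNI local
// references before any suspend point, then write back possibly-moved objects afterwards:
//
//   RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa);
//   visitor.VisitArguments();
//   // ... code that may suspend and trigger a moving GC ...
//   visitor.FixupReferences();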
1121
Alex Lightb7edcda2017-04-27 13:20:31 -07001122extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method,
1123 mirror::Object* this_object,
1124 Thread* self,
1125 ArtMethod** sp)
1126 REQUIRES_SHARED(Locks::mutator_lock_) {
1127 const void* result;
1128 // Instrumentation changes the stack. Thus, when exiting, the stack cannot be verified, so skip
1129 // that part.
1130 ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
1131 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
Alex Light6cae5ea2018-06-07 17:07:02 -07001132 DCHECK(!method->IsProxyMethod())
1133 << "Proxy method " << method->PrettyMethod()
1134 << " (declaring class: " << method->GetDeclaringClass()->PrettyClass() << ")"
1135 << " should not hit instrumentation entrypoint.";
Alex Lightb7edcda2017-04-27 13:20:31 -07001136 if (instrumentation->IsDeoptimized(method)) {
1137 result = GetQuickToInterpreterBridge();
1138 } else {
Alex Light2d441b12018-06-08 15:33:21 -07001139 // This will get the entry point from the oat file or the JIT, falling back to the appropriate
1140 // bridge if neither of those is available.
1141 result = instrumentation->GetCodeForInvoke(method);
1142 jit::Jit* jit = Runtime::Current()->GetJit();
1143 DCHECK_NE(result, GetQuickInstrumentationEntryPoint()) << method->PrettyMethod();
1144 DCHECK(jit == nullptr ||
1145 // Native methods come through here with interpreter entrypoints. We might not have
1146 // disabled jit-gc, but that is fine since we won't return jit code for native methods.
1147 method->IsNative() ||
1148 !jit->GetCodeCache()->GetGarbageCollectCode());
1149 DCHECK(!method->IsNative() ||
1150 jit == nullptr ||
1151 !jit->GetCodeCache()->ContainsPc(result))
1152 << method->PrettyMethod() << " code will jump to possibly cleaned up jit code!";
Alex Lightb7edcda2017-04-27 13:20:31 -07001153 }
1154
1155 bool interpreter_entry = (result == GetQuickToInterpreterBridge());
1156 bool is_static = method->IsStatic();
1157 uint32_t shorty_len;
1158 const char* shorty =
1159 method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty(&shorty_len);
1160
1161 ScopedObjectAccessUnchecked soa(self);
1162 RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa);
1163 visitor.VisitArguments();
1164
1165 instrumentation->PushInstrumentationStackFrame(self,
1166 is_static ? nullptr : this_object,
1167 method,
1168 QuickArgumentVisitor::GetCallingPc(sp),
1169 interpreter_entry);
1170
1171 visitor.FixupReferences();
1172 if (UNLIKELY(self->IsExceptionPending())) {
1173 return nullptr;
1174 }
1175 CHECK(result != nullptr) << method->PrettyMethod();
1176 return result;
1177}
1178
1179extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self,
1180 ArtMethod** sp,
1181 uint64_t* gpr_result,
1182 uint64_t* fpr_result)
1183 REQUIRES_SHARED(Locks::mutator_lock_) {
1184 DCHECK_EQ(reinterpret_cast<uintptr_t>(self), reinterpret_cast<uintptr_t>(Thread::Current()));
1185 CHECK(gpr_result != nullptr);
1186 CHECK(fpr_result != nullptr);
1187 // Instrumentation exit stub must not be entered with a pending exception.
1188 CHECK(!self->IsExceptionPending()) << "Enter instrumentation exit stub with pending exception "
1189 << self->GetException()->Dump();
1190 // Compute address of return PC and sanity check that it currently holds 0.
Vladimir Markod3083dd2018-05-17 08:43:47 +01001191 constexpr size_t return_pc_offset =
1192 RuntimeCalleeSaveFrame::GetReturnPcOffset(CalleeSaveType::kSaveEverything);
Alex Lightb7edcda2017-04-27 13:20:31 -07001193 uintptr_t* return_pc = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) +
1194 return_pc_offset);
1195 CHECK_EQ(*return_pc, 0U);
1196
1197 // Pop the frame, filling in the return pc. The two-word return value tells the exit stub where
1198 // to continue: when deoptimization shouldn't be performed, it carries the original return
1199 // address; when deoptimization should be performed, it carries the address of the
1200 // deoptimization entry point instead.
1201 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
1202 TwoWordReturn return_or_deoptimize_pc = instrumentation->PopInstrumentationStackFrame(
1203 self, return_pc, gpr_result, fpr_result);
Vladimir Markofac21782018-03-13 17:01:09 +00001204 if (self->IsExceptionPending() || self->ObserveAsyncException()) {
Alex Lightb7edcda2017-04-27 13:20:31 -07001205 return GetTwoWordFailureValue();
1206 }
1207 return return_or_deoptimize_pc;
1208}
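// How the assembly exit stub consumes the pair (conceptual sketch only; which word lands in
// which register is architecture-specific):
//
//   TwoWordReturn ret = artInstrumentationMethodExitFromCode(self, sp, gprs, fprs);
//   if (/* ret is the failure value, i.e. both words zero */) {
//     // Deliver the pending exception.
//   } else {
//     // Continue at the address carried in ret: the original return pc, or the
//     // deoptimization entry point when the frame must be deoptimized.
//   }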
1209
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001210static std::string DumpInstruction(ArtMethod* method, uint32_t dex_pc)
1211 REQUIRES_SHARED(Locks::mutator_lock_) {
1212 if (dex_pc == static_cast<uint32_t>(-1)) {
1213 CHECK(method == jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt));
1214 return "<native>";
1215 } else {
1216 CodeItemInstructionAccessor accessor = method->DexInstructions();
1217 CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits());
1218 return accessor.InstructionAt(dex_pc).DumpString(method->GetDexFile());
1219 }
1220}
1221
Vladimir Marko606adb32018-04-05 14:49:24 +01001222static void DumpB74410240ClassData(ObjPtr<mirror::Class> klass)
1223 REQUIRES_SHARED(Locks::mutator_lock_) {
1224 std::string storage;
1225 const char* descriptor = klass->GetDescriptor(&storage);
1226 LOG(FATAL_WITHOUT_ABORT) << " " << DescribeLoaders(klass->GetClassLoader(), descriptor);
1227 const OatDexFile* oat_dex_file = klass->GetDexFile().GetOatDexFile();
1228 if (oat_dex_file != nullptr) {
1229 const OatFile* oat_file = oat_dex_file->GetOatFile();
1230 const char* dex2oat_cmdline =
1231 oat_file->GetOatHeader().GetStoreValueByKey(OatHeader::kDex2OatCmdLineKey);
1232 LOG(FATAL_WITHOUT_ABORT) << " OatFile: " << oat_file->GetLocation()
1233 << "; " << (dex2oat_cmdline != nullptr ? dex2oat_cmdline : "<not recorded>");
1234 }
1235}
1236
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001237static void DumpB74410240DebugData(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
1238 // Mimic the search for the caller and dump some data while doing so.
Vladimir Marko606adb32018-04-05 14:49:24 +01001239 LOG(FATAL_WITHOUT_ABORT) << "Dumping debugging data, please attach a bugreport to b/74410240.";
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001240
1241 constexpr CalleeSaveType type = CalleeSaveType::kSaveRefsAndArgs;
1242 CHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type));
1243
Vladimir Markod3083dd2018-05-17 08:43:47 +01001244 constexpr size_t callee_frame_size = RuntimeCalleeSaveFrame::GetFrameSize(type);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001245 auto** caller_sp = reinterpret_cast<ArtMethod**>(
1246 reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
Vladimir Markod3083dd2018-05-17 08:43:47 +01001247 constexpr size_t callee_return_pc_offset = RuntimeCalleeSaveFrame::GetReturnPcOffset(type);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001248 uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
1249 (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
1250 ArtMethod* outer_method = *caller_sp;
1251
1252 if (UNLIKELY(caller_pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()))) {
1253 LOG(FATAL_WITHOUT_ABORT) << "Method: " << outer_method->PrettyMethod()
1254 << " native pc: " << caller_pc << " Instrumented!";
1255 return;
1256 }
1257
1258 const OatQuickMethodHeader* current_code = outer_method->GetOatQuickMethodHeader(caller_pc);
1259 CHECK(current_code != nullptr);
1260 CHECK(current_code->IsOptimized());
1261 uintptr_t native_pc_offset = current_code->NativeQuickPcOffset(caller_pc);
David Srbecky052f8ca2018-04-26 15:42:54 +01001262 CodeInfo code_info(current_code);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001263 MethodInfo method_info = current_code->GetOptimizedMethodInfo();
David Srbecky052f8ca2018-04-26 15:42:54 +01001264 StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001265 CHECK(stack_map.IsValid());
David Srbecky052f8ca2018-04-26 15:42:54 +01001266 uint32_t dex_pc = stack_map.GetDexPc();
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001267
1268 // Log the outer method and its associated dex file and class table pointer, which can be used
1269 // to find out if the inlined methods were defined by other dex file(s) or class loader(s).
1270 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1271 LOG(FATAL_WITHOUT_ABORT) << "Outer: " << outer_method->PrettyMethod()
1272 << " native pc: " << caller_pc
1273 << " dex pc: " << dex_pc
1274 << " dex file: " << outer_method->GetDexFile()->GetLocation()
1275 << " class table: " << class_linker->ClassTableForClassLoader(outer_method->GetClassLoader());
Vladimir Marko606adb32018-04-05 14:49:24 +01001276 DumpB74410240ClassData(outer_method->GetDeclaringClass());
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001277 LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(outer_method, dex_pc);
1278
1279 ArtMethod* caller = outer_method;
David Srbecky93bd3612018-07-02 19:30:18 +01001280 BitTableRange<InlineInfo> inline_infos = code_info.GetInlineInfosOf(stack_map);
1281 for (InlineInfo inline_info : inline_infos) {
1282 const char* tag = "";
1283 dex_pc = inline_info.GetDexPc();
1284 if (inline_info.EncodesArtMethod()) {
1285 tag = "encoded ";
1286 caller = inline_info.GetArtMethod();
1287 } else {
1288 uint32_t method_index = inline_info.GetMethodIndex(method_info);
1289 if (dex_pc == static_cast<uint32_t>(-1)) {
1290 tag = "special ";
1291 CHECK(inline_info.Equals(inline_infos.back()));
1292 caller = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
1293 CHECK_EQ(caller->GetDexMethodIndex(), method_index);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001294 } else {
David Srbecky93bd3612018-07-02 19:30:18 +01001295 ObjPtr<mirror::DexCache> dex_cache = caller->GetDexCache();
1296 ObjPtr<mirror::ClassLoader> class_loader = caller->GetClassLoader();
1297 caller = class_linker->LookupResolvedMethod(method_index, dex_cache, class_loader);
1298 CHECK(caller != nullptr);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001299 }
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001300 }
David Srbecky93bd3612018-07-02 19:30:18 +01001301 LOG(FATAL_WITHOUT_ABORT) << "InlineInfo #" << inline_info.Row()
1302 << ": " << tag << caller->PrettyMethod()
1303 << " dex pc: " << dex_pc
1304 << " dex file: " << caller->GetDexFile()->GetLocation()
1305 << " class table: "
1306 << class_linker->ClassTableForClassLoader(caller->GetClassLoader());
1307 DumpB74410240ClassData(caller->GetDeclaringClass());
1308 LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(caller, dex_pc);
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001309 }
1310}
1311
Ian Rogers848871b2013-08-05 10:56:33 -07001312// Lazily resolve a method for quick. Called by stub code.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001313extern "C" const void* artQuickResolutionTrampoline(
1314 ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001315 REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe3b45ef22015-05-26 21:34:09 -07001316 // The resolution trampoline stashes the resolved method into the callee-save frame to transport
1317 // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
1318 // does not have the same stack layout as the callee-save method).
1319 ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
Ian Rogers848871b2013-08-05 10:56:33 -07001320 // Start new JNI local reference state
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001321 JNIEnvExt* env = self->GetJniEnv();
Ian Rogers848871b2013-08-05 10:56:33 -07001322 ScopedObjectAccessUnchecked soa(env);
1323 ScopedJniEnvLocalRefState env_state(env);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001324 const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");
Ian Rogers848871b2013-08-05 10:56:33 -07001325
1326 // Compute details about the called method (avoid GCs)
1327 ClassLinker* linker = Runtime::Current()->GetClassLinker();
Ian Rogers848871b2013-08-05 10:56:33 -07001328 InvokeType invoke_type;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001329 MethodReference called_method(nullptr, 0);
1330 const bool called_method_known_on_entry = !called->IsRuntimeMethod();
Mathieu Chartiere401d142015-04-22 13:56:20 -07001331 ArtMethod* caller = nullptr;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001332 if (!called_method_known_on_entry) {
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +01001333 caller = QuickArgumentVisitor::GetCallingMethod(sp);
Ian Rogerse0a02da2014-12-02 14:10:53 -08001334 called_method.dex_file = caller->GetDexFile();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001335
1336 InvokeType stack_map_invoke_type;
1337 uint32_t stack_map_dex_method_idx;
1338 const bool found_stack_map = QuickArgumentVisitor::GetInvokeType(sp,
1339 &stack_map_invoke_type,
1340 &stack_map_dex_method_idx);
1341 // For debug builds, we make sure both of the paths are consistent by also looking at the dex
1342 // code.
1343 if (!found_stack_map || kIsDebugBuild) {
1344 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
David Sehr0225f8e2018-01-31 08:52:24 +00001345 CodeItemInstructionAccessor accessor(caller->DexInstructions());
Mathieu Chartier808c7a52017-12-15 11:19:33 -08001346 CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits());
1347 const Instruction& instr = accessor.InstructionAt(dex_pc);
Vladimir Markod7559b72017-09-28 13:50:37 +01001348 Instruction::Code instr_code = instr.Opcode();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001349 bool is_range;
1350 switch (instr_code) {
1351 case Instruction::INVOKE_DIRECT:
1352 invoke_type = kDirect;
1353 is_range = false;
1354 break;
1355 case Instruction::INVOKE_DIRECT_RANGE:
1356 invoke_type = kDirect;
1357 is_range = true;
1358 break;
1359 case Instruction::INVOKE_STATIC:
1360 invoke_type = kStatic;
1361 is_range = false;
1362 break;
1363 case Instruction::INVOKE_STATIC_RANGE:
1364 invoke_type = kStatic;
1365 is_range = true;
1366 break;
1367 case Instruction::INVOKE_SUPER:
1368 invoke_type = kSuper;
1369 is_range = false;
1370 break;
1371 case Instruction::INVOKE_SUPER_RANGE:
1372 invoke_type = kSuper;
1373 is_range = true;
1374 break;
1375 case Instruction::INVOKE_VIRTUAL:
1376 invoke_type = kVirtual;
1377 is_range = false;
1378 break;
1379 case Instruction::INVOKE_VIRTUAL_RANGE:
1380 invoke_type = kVirtual;
1381 is_range = true;
1382 break;
1383 case Instruction::INVOKE_INTERFACE:
1384 invoke_type = kInterface;
1385 is_range = false;
1386 break;
1387 case Instruction::INVOKE_INTERFACE_RANGE:
1388 invoke_type = kInterface;
1389 is_range = true;
1390 break;
1391 default:
Vladimir Marko5b4b9a02018-03-16 09:42:09 +00001392 DumpB74410240DebugData(sp);
Vladimir Markod7559b72017-09-28 13:50:37 +01001393 LOG(FATAL) << "Unexpected call into trampoline: " << instr.DumpString(nullptr);
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001394 UNREACHABLE();
1395 }
Vladimir Markod7559b72017-09-28 13:50:37 +01001396 called_method.index = (is_range) ? instr.VRegB_3rc() : instr.VRegB_35c();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001397 // Check that the invoke matches what we expected; note that this path only happens for debug
1398 // builds.
1399 if (found_stack_map) {
1400 DCHECK_EQ(stack_map_invoke_type, invoke_type);
1401 if (invoke_type != kSuper) {
1402 // Super may be sharpened.
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001403 DCHECK_EQ(stack_map_dex_method_idx, called_method.index)
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001404 << called_method.dex_file->PrettyMethod(stack_map_dex_method_idx) << " "
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001405 << called_method.PrettyMethod();
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001406 }
1407 } else {
Andreas Gampe9e6dee22017-04-11 13:50:23 -07001408 VLOG(dex) << "Accessed dex file for invoke " << invoke_type << " "
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001409 << called_method.index;
Mathieu Chartierd776ff02017-01-17 09:32:18 -08001410 }
1411 } else {
1412 invoke_type = stack_map_invoke_type;
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001413 called_method.index = stack_map_dex_method_idx;
Ian Rogers848871b2013-08-05 10:56:33 -07001414 }
Ian Rogers848871b2013-08-05 10:56:33 -07001415 } else {
1416 invoke_type = kStatic;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001417 called_method.dex_file = called->GetDexFile();
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001418 called_method.index = called->GetDexMethodIndex();
Ian Rogers848871b2013-08-05 10:56:33 -07001419 }
1420 uint32_t shorty_len;
1421 const char* shorty =
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001422 called_method.dex_file->GetMethodShorty(called_method.GetMethodId(), &shorty_len);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001423 RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
Ian Rogers848871b2013-08-05 10:56:33 -07001424 visitor.VisitArguments();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001425 self->EndAssertNoThreadSuspension(old_cause);
Ian Rogerse0a02da2014-12-02 14:10:53 -08001426 const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
Ian Rogers848871b2013-08-05 10:56:33 -07001427 // Resolve method filling in dex cache.
Ian Rogerse0a02da2014-12-02 14:10:53 -08001428 if (!called_method_known_on_entry) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001429 StackHandleScope<1> hs(self);
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001430 mirror::Object* dummy = nullptr;
1431 HandleWrapper<mirror::Object> h_receiver(
1432 hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
Ian Rogerse0a02da2014-12-02 14:10:53 -08001433 DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
Vladimir Markoba118822017-06-12 15:41:56 +01001434 called = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
Mathieu Chartierfc8b4222017-09-17 13:44:24 -07001435 self, called_method.index, caller, invoke_type);
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001436
1437 // Update .bss entry in oat file if any.
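    // (A hit in this mapping caches the resolved ArtMethod* in the oat file's .bss section,
    // so compiled code can use it directly on later calls instead of re-entering this
    // trampoline.)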
1438 if (called != nullptr && called_method.dex_file->GetOatDexFile() != nullptr) {
Vladimir Markof3c52b42017-11-17 17:32:12 +00001439 size_t bss_offset = IndexBssMappingLookup::GetBssOffset(
1440 called_method.dex_file->GetOatDexFile()->GetMethodBssMapping(),
1441 called_method.index,
1442 called_method.dex_file->NumMethodIds(),
1443 static_cast<size_t>(kRuntimePointerSize));
1444 if (bss_offset != IndexBssMappingLookup::npos) {
1445 DCHECK_ALIGNED(bss_offset, static_cast<size_t>(kRuntimePointerSize));
1446 const OatFile* oat_file = called_method.dex_file->GetOatDexFile()->GetOatFile();
1447 ArtMethod** method_entry = reinterpret_cast<ArtMethod**>(const_cast<uint8_t*>(
1448 oat_file->BssBegin() + bss_offset));
1449 DCHECK_GE(method_entry, oat_file->GetBssMethods().data());
1450 DCHECK_LT(method_entry,
1451 oat_file->GetBssMethods().data() + oat_file->GetBssMethods().size());
1452 *method_entry = called;
Vladimir Marko0eb882b2017-05-15 13:39:18 +01001453 }
1454 }
Ian Rogers848871b2013-08-05 10:56:33 -07001455 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001456 const void* code = nullptr;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001457 if (LIKELY(!self->IsExceptionPending())) {
Ian Rogers848871b2013-08-05 10:56:33 -07001458 // Incompatible class change should have been handled in resolve method.
Brian Carlstrom2ec65202014-03-03 15:16:37 -08001459 CHECK(!called->CheckIncompatibleClassChange(invoke_type))
David Sehr709b0702016-10-13 09:12:37 -07001460 << called->PrettyMethod() << " " << invoke_type;
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001461 if (virtual_or_interface || invoke_type == kSuper) {
1462 // Refine called method based on receiver for kVirtual/kInterface, and
1463 // caller for kSuper.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001464 ArtMethod* orig_called = called;
Mathieu Chartier55871bf2014-02-27 10:24:50 -08001465 if (invoke_type == kVirtual) {
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001466 CHECK(receiver != nullptr) << invoke_type;
Andreas Gampe542451c2016-07-26 09:02:02 -07001467 called = receiver->GetClass()->FindVirtualMethodForVirtual(called, kRuntimePointerSize);
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001468 } else if (invoke_type == kInterface) {
1469 CHECK(receiver != nullptr) << invoke_type;
Andreas Gampe542451c2016-07-26 09:02:02 -07001470 called = receiver->GetClass()->FindVirtualMethodForInterface(called, kRuntimePointerSize);
Nicolas Geoffraye5234232015-12-02 09:06:11 +00001471 } else {
1472 DCHECK_EQ(invoke_type, kSuper);
1473 CHECK(caller != nullptr) << invoke_type;
Vladimir Markoba118822017-06-12 15:41:56 +01001474 ObjPtr<mirror::Class> ref_class = linker->LookupResolvedType(
Vladimir Marko666ee3d2017-12-11 18:37:36 +00001475 caller->GetDexFile()->GetMethodId(called_method.index).class_idx_, caller);
Alex Lightfedd91d2016-01-07 14:49:16 -08001476 if (ref_class->IsInterface()) {
Andreas Gampe542451c2016-07-26 09:02:02 -07001477 called = ref_class->FindVirtualMethodForInterfaceSuper(called, kRuntimePointerSize);
Alex Lightfedd91d2016-01-07 14:49:16 -08001478 } else {
1479 called = caller->GetDeclaringClass()->GetSuperClass()->GetVTableEntry(
Andreas Gampe542451c2016-07-26 09:02:02 -07001480 called->GetMethodIndex(), kRuntimePointerSize);
Alex Lightfedd91d2016-01-07 14:49:16 -08001481 }
Mathieu Chartier55871bf2014-02-27 10:24:50 -08001482 }
Mingyao Yangf4867782014-05-05 11:55:02 -07001483
David Sehr709b0702016-10-13 09:12:37 -07001484 CHECK(called != nullptr) << orig_called->PrettyMethod() << " "
1485 << mirror::Object::PrettyTypeOf(receiver) << " "
Mingyao Yangf4867782014-05-05 11:55:02 -07001486 << invoke_type << " " << orig_called->GetVtableIndex();
Ian Rogers83883d72013-10-21 21:07:24 -07001487 }
Daniel Mihalyieb076692014-08-22 17:33:31 +02001488
Ian Rogers848871b2013-08-05 10:56:33 -07001489 // Ensure that the called method's class is initialized.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001490 StackHandleScope<1> hs(soa.Self());
1491 Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
Ian Rogers7b078e82014-09-10 14:44:24 -07001492 linker->EnsureInitialized(soa.Self(), called_class, true, true);
Ian Rogers848871b2013-08-05 10:56:33 -07001493 if (LIKELY(called_class->IsInitialized())) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001494 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1495 // If we are single-stepping or the called method is deoptimized (by a
1496 // breakpoint, for example), then we have to execute the called method
1497 // with the interpreter.
1498 code = GetQuickToInterpreterBridge();
1499 } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
1500 // If the caller is deoptimized (by a breakpoint, for example), we have to
1501 // continue its execution with interpreter when returning from the called
1502 // method. Because we do not want to execute the called method with the
1503 // interpreter, we wrap its execution into the instrumentation stubs.
1504 // When the called method returns, it will execute the instrumentation
1505 // exit hook that will determine the need of the interpreter with a call
1506 // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
1507 // it is needed.
1508 code = GetQuickInstrumentationEntryPoint();
1509 } else {
1510 code = called->GetEntryPointFromQuickCompiledCode();
1511 }
Ian Rogers848871b2013-08-05 10:56:33 -07001512 } else if (called_class->IsInitializing()) {
Daniel Mihalyieb076692014-08-22 17:33:31 +02001513 if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
1514 // If we are single-stepping or the called method is deoptimized (by a
1515 // breakpoint, for example), then we have to execute the called method
1516 // with the interpreter.
1517 code = GetQuickToInterpreterBridge();
1518 } else if (invoke_type == kStatic) {
Alex Lightfc49fec2018-01-16 22:28:36 +00001519 // The class is still initializing; go to the oat file and grab code (the trampoline must be
1520 // left in place until the class is initialized to stop races between threads).
1521 code = linker->GetQuickOatCodeFor(called);
Ian Rogers848871b2013-08-05 10:56:33 -07001522 } else {
1523 // No trampoline for non-static methods.
Ian Rogersef7d42f2014-01-06 12:55:46 -08001524 code = called->GetEntryPointFromQuickCompiledCode();
Ian Rogers848871b2013-08-05 10:56:33 -07001525 }
1526 } else {
1527 DCHECK(called_class->IsErroneous());
1528 }
1529 }
Ian Rogerse0a02da2014-12-02 14:10:53 -08001530 CHECK_EQ(code == nullptr, self->IsExceptionPending());
Mathieu Chartier07d447b2013-09-26 11:57:43 -07001531 // Fix up any locally saved objects that may have moved during a GC.
1532 visitor.FixupReferences();
Ian Rogers848871b2013-08-05 10:56:33 -07001533 // Place the called method in the callee-save frame to be passed as the first argument to the quick method.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001534 *sp = called;
1535
Ian Rogers848871b2013-08-05 10:56:33 -07001536 return code;
1537}
1538
Andreas Gampec147b002014-03-06 18:11:06 -08001539/*
1540 * This class uses a couple of observations to unite the different calling conventions through
1541 * a few constants.
1542 *
1543 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
1544 * possible alignment.
1545 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
1546 * types, so using uintptr_t is OK. This also means that we can use kRegistersNeededX to denote
1547 * when we have to split things.
1548 * 3) The only soft-float architecture, ARM, is 32b, so no widening needs to be taken into account for floats
1549 * and we can use Int handling directly.
1550 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
1551 * necessary when widening. Also, widening of Ints will take place implicitly, and the
1552 * extension should be compatible with Aarch64, which mandates copying the available bits
1553 * into LSB and leaving the rest unspecified.
1554 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
1555 * the stack.
1556 * 6) Only little-endian needs to be supported.
1557 *
1558 *
1559 * Actual work is supposed to be done in a delegate of the template type. The interface is as
1560 * follows:
1561 *
1562 * void PushGpr(uintptr_t): Add a value for the next GPR
1563 *
1564 * void PushFpr4(float): Add a value for the next FPR of size 32b. Only called if we need
1565 * padding, that is, when the architecture is 32b and aligns 64b values.
1566 *
1567 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b; it's the callee's job to
1568 * split this if necessary. The current state will already have been
1569 * aligned, if necessary.
1570 *
1571 * void PushStack(uintptr_t): Push a value to the stack.
1572 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001573 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be called with nullptr,
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001574 * as this might be important for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -08001575 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001576 * entry in the HandleScope (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -08001577 *
1578 */
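// A delegate sketch satisfying the interface above (hypothetical, for illustration only):
//
//   class RecordingDelegate {
//    public:
//     void PushGpr(uintptr_t val) { gprs_.push_back(val); }
//     void PushFpr4(float val) { fprs4_.push_back(val); }
//     void PushFpr8(uint64_t val) { fprs8_.push_back(val); }
//     void PushStack(uintptr_t val) { stack_.push_back(val); }
//     uintptr_t PushHandle(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
//       // A real delegate stores `ref` in a HandleScope and returns the entry's address;
//       // this sketch just passes the raw pointer through.
//       return reinterpret_cast<uintptr_t>(ref);
//     }
//    private:
//     std::vector<uintptr_t> gprs_, stack_;
//     std::vector<float> fprs4_;
//     std::vector<uint64_t> fprs8_;
//   };
//
//   RecordingDelegate delegate;
//   BuildNativeCallFrameStateMachine<RecordingDelegate> sm(&delegate);
//   sm.AdvancePointer(env);  // Consumes a GPR while one is free, else a stack slot.
//   sm.AdvanceDouble(bit_cast<uint64_t, double>(3.0));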
Andreas Gampec200a4a2014-06-16 18:39:09 -07001579template<class T> class BuildNativeCallFrameStateMachine {
Andreas Gampec147b002014-03-06 18:11:06 -08001580 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001581#if defined(__arm__)
1582 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -08001583 static constexpr bool kNativeSoftFloatAbi = true;
1584 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001585 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1586
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001587 static constexpr size_t kRegistersNeededForLong = 2;
1588 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001589 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001590 static constexpr bool kMultiFPRegistersWidened = false;
1591 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001592 static constexpr bool kAlignLongOnStack = true;
1593 static constexpr bool kAlignDoubleOnStack = true;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001594#elif defined(__aarch64__)
1595 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1596 static constexpr size_t kNumNativeGprArgs = 8; // 8 arguments passed in GPRs.
1597 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1598
1599 static constexpr size_t kRegistersNeededForLong = 1;
1600 static constexpr size_t kRegistersNeededForDouble = 1;
1601 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001602 static constexpr bool kMultiFPRegistersWidened = false;
1603 static constexpr bool kMultiGPRegistersWidened = false;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001604 static constexpr bool kAlignLongOnStack = false;
1605 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001606#elif defined(__mips__) && !defined(__LP64__)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001607 static constexpr bool kNativeSoftFloatAbi = true; // Hard-float ABI, but args are marshaled in GPRs here.
Douglas Leung735b8552014-10-31 12:21:40 -07001608 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs.
1609 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001610
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001611 static constexpr size_t kRegistersNeededForLong = 2;
1612 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001613 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001614 static constexpr bool kMultiFPRegistersWidened = true;
1615 static constexpr bool kMultiGPRegistersWidened = false;
Douglas Leung735b8552014-10-31 12:21:40 -07001616 static constexpr bool kAlignLongOnStack = true;
1617 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001618#elif defined(__mips__) && defined(__LP64__)
1619 // Let the code prepare GPRs only; we will load the FPRs with the same data.
1620 static constexpr bool kNativeSoftFloatAbi = true;
1621 static constexpr size_t kNumNativeGprArgs = 8;
1622 static constexpr size_t kNumNativeFprArgs = 0;
1623
1624 static constexpr size_t kRegistersNeededForLong = 1;
1625 static constexpr size_t kRegistersNeededForDouble = 1;
1626 static constexpr bool kMultiRegistersAligned = false;
1627 static constexpr bool kMultiFPRegistersWidened = false;
1628 static constexpr bool kMultiGPRegistersWidened = true;
1629 static constexpr bool kAlignLongOnStack = false;
1630 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001631#elif defined(__i386__)
1632 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -08001633 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001634 static constexpr size_t kNumNativeGprArgs = 0; // 0 arguments passed in GPRs; all go on the stack.
1635 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1636
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001637 static constexpr size_t kRegistersNeededForLong = 2;
1638 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001639 static constexpr bool kMultiRegistersAligned = false; // x86 is not using regs anyway.
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001640 static constexpr bool kMultiFPRegistersWidened = false;
1641 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001642 static constexpr bool kAlignLongOnStack = false;
1643 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001644#elif defined(__x86_64__)
1645 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1646 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
1647 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1648
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001649 static constexpr size_t kRegistersNeededForLong = 1;
1650 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -08001651 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001652 static constexpr bool kMultiFPRegistersWidened = false;
1653 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001654 static constexpr bool kAlignLongOnStack = false;
1655 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001656#else
1657#error "Unsupported architecture"
1658#endif
1659
Andreas Gampec147b002014-03-06 18:11:06 -08001660 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001661 explicit BuildNativeCallFrameStateMachine(T* delegate)
1662 : gpr_index_(kNumNativeGprArgs),
1663 fpr_index_(kNumNativeFprArgs),
1664 stack_entries_(0),
1665 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -08001666 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
1667 // the next register is even; counting down is just to make the compiler happy...
Andreas Gampe575e78c2014-11-03 23:41:03 -08001668 static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
1669 static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
Andreas Gampec147b002014-03-06 18:11:06 -08001670 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001671
Andreas Gampec200a4a2014-06-16 18:39:09 -07001672 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001673
Ian Rogers1428dce2014-10-21 15:02:15 -07001674 bool HavePointerGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001675 return gpr_index_ > 0;
1676 }
1677
Andreas Gampec200a4a2014-06-16 18:39:09 -07001678 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -08001679 if (HavePointerGpr()) {
1680 gpr_index_--;
1681 PushGpr(reinterpret_cast<uintptr_t>(val));
1682 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001683 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -08001684 PushStack(reinterpret_cast<uintptr_t>(val));
1685 gpr_index_ = 0;
1686 }
1687 }
1688
Ian Rogers1428dce2014-10-21 15:02:15 -07001689 bool HaveHandleScopeGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001690 return gpr_index_ > 0;
1691 }
1692
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001693 void AdvanceHandleScope(mirror::Object* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001694 uintptr_t handle = PushHandle(ptr);
1695 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -08001696 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001697 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001698 } else {
1699 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001700 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001701 gpr_index_ = 0;
1702 }
1703 }
1704
Ian Rogers1428dce2014-10-21 15:02:15 -07001705 bool HaveIntGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001706 return gpr_index_ > 0;
1707 }
1708
1709 void AdvanceInt(uint32_t val) {
1710 if (HaveIntGpr()) {
1711 gpr_index_--;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001712 if (kMultiGPRegistersWidened) {
1713 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001714 PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001715 } else {
1716 PushGpr(val);
1717 }
Andreas Gampec147b002014-03-06 18:11:06 -08001718 } else {
1719 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001720 if (kMultiGPRegistersWidened) {
1721 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001722 PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001723 } else {
1724 PushStack(val);
1725 }
Andreas Gampec147b002014-03-06 18:11:06 -08001726 gpr_index_ = 0;
1727 }
1728 }
1729
Ian Rogers1428dce2014-10-21 15:02:15 -07001730 bool HaveLongGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001731 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
1732 }
1733
Ian Rogers1428dce2014-10-21 15:02:15 -07001734 bool LongGprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001735 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1736 kAlignLongOnStack && // and when it needs alignment
1737 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1738 }
1739
Ian Rogers1428dce2014-10-21 15:02:15 -07001740 bool LongStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001741 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1742 kAlignLongOnStack && // and when it needs 8B alignment
1743 (stack_entries_ & 1) == 1; // counter is odd
1744 }
1745
1746 void AdvanceLong(uint64_t val) {
1747 if (HaveLongGpr()) {
1748 if (LongGprNeedsPadding()) {
1749 PushGpr(0);
1750 gpr_index_--;
1751 }
1752 if (kRegistersNeededForLong == 1) {
1753 PushGpr(static_cast<uintptr_t>(val));
1754 } else {
1755 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1756 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1757 }
1758 gpr_index_ -= kRegistersNeededForLong;
1759 } else {
1760 if (LongStackNeedsPadding()) {
1761 PushStack(0);
1762 stack_entries_++;
1763 }
1764 if (kRegistersNeededForLong == 1) {
1765 PushStack(static_cast<uintptr_t>(val));
1766 stack_entries_++;
1767 } else {
1768 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1769 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1770 stack_entries_ += 2;
1771 }
1772 gpr_index_ = 0;
1773 }
1774 }
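  // Worked example (32-bit ARM, gpr_index_ starts at kNumNativeGprArgs = 4): after JNIEnv* and
  // the jclass/receiver, gpr_index_ is 2; an int takes it to 1 (odd). A following long then
  // fails HaveLongGpr() (1 < kRegistersNeededForLong + 1 for padding), so both halves go to the
  // stack and gpr_index_ drops to 0; the machine never splits a value between GPRs and stack.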
1775
Ian Rogers1428dce2014-10-21 15:02:15 -07001776 bool HaveFloatFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001777 return fpr_index_ > 0;
1778 }
1779
Andreas Gampec147b002014-03-06 18:11:06 -08001780 void AdvanceFloat(float val) {
1781 if (kNativeSoftFloatAbi) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001782 AdvanceInt(bit_cast<uint32_t, float>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001783 } else {
1784 if (HaveFloatFpr()) {
1785 fpr_index_--;
1786 if (kRegistersNeededForDouble == 1) {
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001787 if (kMultiFPRegistersWidened) {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001788 PushFpr8(bit_cast<uint64_t, double>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001789 } else {
1790 // No widening, just use the bits.
Roland Levillainda4d79b2015-03-24 14:36:11 +00001791 PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001792 }
1793 } else {
1794 PushFpr4(val);
1795 }
1796 } else {
1797 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001798 if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
Andreas Gampec147b002014-03-06 18:11:06 -08001799 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001800 // Note: We need to jump through those hoops to make the compiler happy.
1801 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
Roland Levillainda4d79b2015-03-24 14:36:11 +00001802 PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001803 } else {
Roland Levillainda4d79b2015-03-24 14:36:11 +00001804 PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001805 }
1806 fpr_index_ = 0;
1807 }
1808 }
1809 }
1810
Ian Rogers1428dce2014-10-21 15:02:15 -07001811 bool HaveDoubleFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001812 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1813 }
1814
Ian Rogers1428dce2014-10-21 15:02:15 -07001815 bool DoubleFprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001816 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1817 kAlignDoubleOnStack && // and when it needs alignment
1818 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1819 }
1820
Ian Rogers1428dce2014-10-21 15:02:15 -07001821 bool DoubleStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001822 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1823 kAlignDoubleOnStack && // and when it needs 8B alignment
1824 (stack_entries_ & 1) == 1; // counter is odd
1825 }
1826
1827 void AdvanceDouble(uint64_t val) {
1828 if (kNativeSoftFloatAbi) {
1829 AdvanceLong(val);
1830 } else {
1831 if (HaveDoubleFpr()) {
1832 if (DoubleFprNeedsPadding()) {
1833 PushFpr4(0);
1834 fpr_index_--;
1835 }
1836 PushFpr8(val);
1837 fpr_index_ -= kRegistersNeededForDouble;
1838 } else {
1839 if (DoubleStackNeedsPadding()) {
1840 PushStack(0);
1841 stack_entries_++;
1842 }
1843 if (kRegistersNeededForDouble == 1) {
1844 PushStack(static_cast<uintptr_t>(val));
1845 stack_entries_++;
1846 } else {
1847 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1848 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1849 stack_entries_ += 2;
1850 }
1851 fpr_index_ = 0;
1852 }
1853 }
1854 }
1855
Ian Rogers1428dce2014-10-21 15:02:15 -07001856 uint32_t GetStackEntries() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001857 return stack_entries_;
1858 }
1859
Ian Rogers1428dce2014-10-21 15:02:15 -07001860 uint32_t GetNumberOfUsedGprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001861 return kNumNativeGprArgs - gpr_index_;
1862 }
1863
Ian Rogers1428dce2014-10-21 15:02:15 -07001864 uint32_t GetNumberOfUsedFprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001865 return kNumNativeFprArgs - fpr_index_;
1866 }
1867
1868 private:
1869 void PushGpr(uintptr_t val) {
1870 delegate_->PushGpr(val);
1871 }
1872 void PushFpr4(float val) {
1873 delegate_->PushFpr4(val);
1874 }
1875 void PushFpr8(uint64_t val) {
1876 delegate_->PushFpr8(val);
1877 }
1878 void PushStack(uintptr_t val) {
1879 delegate_->PushStack(val);
1880 }
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001881 uintptr_t PushHandle(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001882 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001883 }
1884
1885 uint32_t gpr_index_; // Number of free GPRs
1886 uint32_t fpr_index_; // Number of free FPRs
1887 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1888 // extended
Ian Rogers1428dce2014-10-21 15:02:15 -07001889 T* const delegate_; // What Push implementation gets called
Andreas Gampec147b002014-03-06 18:11:06 -08001890};
1891
Andreas Gampec200a4a2014-06-16 18:39:09 -07001892// Computes the sizes of register stacks and call stack area. Handling of references can be extended
1893// in subclasses.
1894//
1895// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1896// them with handles.
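// For example, a native method "Object f(int, Object)" has the shorty "LIL": Walk() below
// drives the state machine over each argument character (simulating "L" entries with a dummy
// object) to count how many stack slots the native call will need beyond the registers.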
1897class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001898 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001899 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1900
1901 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001902
Ian Rogers1428dce2014-10-21 15:02:15 -07001903 uint32_t GetStackSize() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001904 return num_stack_entries_ * sizeof(uintptr_t);
1905 }
1906
Ian Rogers1428dce2014-10-21 15:02:15 -07001907 uint8_t* LayoutCallStack(uint8_t* sp8) const {
Andreas Gampec147b002014-03-06 18:11:06 -08001908 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001909 // Align by kStackAlignment.
1910 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001911 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001912 }
1913
Ian Rogers1428dce2014-10-21 15:02:15 -07001914 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
1915 const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001916 // The assumption is OK right now, as we have soft-float arm.
1917 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1918 sp8 -= fregs * sizeof(uintptr_t);
1919 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1920 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1921 sp8 -= iregs * sizeof(uintptr_t);
1922 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1923 return sp8;
1924 }
Andreas Gampec147b002014-03-06 18:11:06 -08001925
Andreas Gampec200a4a2014-06-16 18:39:09 -07001926 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
Ian Rogers1428dce2014-10-21 15:02:15 -07001927 uint32_t** start_fpr) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001928 // Native call stack.
1929 sp8 = LayoutCallStack(sp8);
1930 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001931
Andreas Gampec200a4a2014-06-16 18:39:09 -07001932 // Put fprs and gprs below.
1933 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001934
Andreas Gampec200a4a2014-06-16 18:39:09 -07001935 // Return the new bottom.
1936 return sp8;
1937 }
1938
Roland Levillain4b8f1ec2015-08-26 18:34:03 +01001939 virtual void WalkHeader(
1940 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001941 REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001942 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07001943
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07001944 void Walk(const char* shorty, uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001945 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1946
1947 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001948
1949 for (uint32_t i = 1; i < shorty_len; ++i) {
1950 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1951 switch (cur_type_) {
1952 case Primitive::kPrimNot:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001953 // TODO: fix abuse of mirror types.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001954 sm.AdvanceHandleScope(
1955 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001956 break;
1957
1958 case Primitive::kPrimBoolean:
1959 case Primitive::kPrimByte:
1960 case Primitive::kPrimChar:
1961 case Primitive::kPrimShort:
1962 case Primitive::kPrimInt:
1963 sm.AdvanceInt(0);
1964 break;
1965 case Primitive::kPrimFloat:
1966 sm.AdvanceFloat(0);
1967 break;
1968 case Primitive::kPrimDouble:
1969 sm.AdvanceDouble(0);
1970 break;
1971 case Primitive::kPrimLong:
1972 sm.AdvanceLong(0);
1973 break;
1974 default:
1975 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001976 UNREACHABLE();
Andreas Gampec147b002014-03-06 18:11:06 -08001977 }
1978 }
1979
Ian Rogers1428dce2014-10-21 15:02:15 -07001980 num_stack_entries_ = sm.GetStackEntries();
Andreas Gampec147b002014-03-06 18:11:06 -08001981 }
1982
1983 void PushGpr(uintptr_t /* val */) {
1984 // not optimizing registers, yet
1985 }
1986
1987 void PushFpr4(float /* val */) {
1988 // not optimizing registers, yet
1989 }
1990
1991 void PushFpr8(uint64_t /* val */) {
1992 // not optimizing registers, yet
1993 }
1994
1995 void PushStack(uintptr_t /* val */) {
1996 // counting is already done in the superclass
1997 }
1998
Andreas Gampec200a4a2014-06-16 18:39:09 -07001999 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08002000 return reinterpret_cast<uintptr_t>(nullptr);
2001 }
2002
Andreas Gampec200a4a2014-06-16 18:39:09 -07002003 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08002004 uint32_t num_stack_entries_;
2005};
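// Intended use (sketch, mirroring ComputeGenericJniFrameSize::ComputeLayout below):
//
//   ComputeNativeCallFrameSize csm;
//   csm.Walk(shorty, shorty_len);               // Simulate the marshaling pass.
//   uint32_t stack_bytes = csm.GetStackSize();  // Bytes of outgoing stack arguments.
//   sp8 = csm.LayoutNativeCall(sp8, &start_stack, &start_gpr, &start_fpr);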
2006
Andreas Gampec200a4a2014-06-16 18:39:09 -07002007class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002008 public:
Igor Murashkin06a04e02016-09-13 15:57:37 -07002009 explicit ComputeGenericJniFrameSize(bool critical_native)
2010 : num_handle_scope_references_(0), critical_native_(critical_native) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002011
Andreas Gampec200a4a2014-06-16 18:39:09 -07002012 // Lays out the callee-save frame. Assumes that the not-yet-fixed-up frame corresponding to
2013 // RefsAndArgs is at *m = sp. Will update *m to point to the bottom of the save frame.
2014 //
2015 // Note: assumes Walk() has been run before, so the handle scope size is known.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002016 void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002017 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07002018 ArtMethod* method = **m;
2019
Andreas Gampe542451c2016-07-26 09:02:02 -07002020 DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002021
Andreas Gampec200a4a2014-06-16 18:39:09 -07002022 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
2023
2024 // First, fix up the layout of the callee-save frame.
2025 // We have to squeeze in the HandleScope, and relocate the method pointer.
2026
2027 // "Free" the slot for the method.
Ian Rogers13735952014-10-08 12:43:28 -07002028 sp8 += sizeof(void*); // In the callee-save frame we use a full pointer.
Andreas Gampec200a4a2014-06-16 18:39:09 -07002029
2030 // Under the callee saves put handle scope and new method stack reference.
Andreas Gampec200a4a2014-06-16 18:39:09 -07002031 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
Mathieu Chartiere401d142015-04-22 13:56:20 -07002032 size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002033
2034 sp8 -= scope_and_method;
2035 // Align by kStackAlignment.
Mathieu Chartiere401d142015-04-22 13:56:20 -07002036 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07002037
Mathieu Chartiere401d142015-04-22 13:56:20 -07002038 uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
Ian Rogers59c07062014-10-10 13:03:39 -07002039 *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
2040 num_handle_scope_references_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07002041
2042 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
2043 uint8_t* method_pointer = sp8;
Mathieu Chartiere401d142015-04-22 13:56:20 -07002044 auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
2045 *new_method_ref = method;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002046 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08002047 }
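
  // Illustration only (derived from the code above, not authoritative): after
  // LayoutCalleeSaveFrame() the memory below the original frame looks roughly like this,
  // with the stack growing downwards:
  //
  //   | RefsAndArgs callee-save frame        |  <- original sp
  //   | HandleScope (header + references)    |  <- sp8 + sizeof(ArtMethod*)
  //   | ArtMethod* slot (copy of the method) |  <- sp8, the updated *m
  //
  // The HandleScope is squeezed in under the callee saves so that stack walks still see a
  // normal-looking frame whose method slot is *m.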

  // Adds space for the cookie. Note: may leave the stack unaligned.
  void LayoutCookie(uint8_t** sp) const {
    // Reference cookie and padding.
    *sp -= 8;
  }

  // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
  // Returns the new bottom. Note: this may be unaligned.
  uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.
    LayoutCalleeSaveFrame(self, m, sp, handle_scope);

    // The bottom of the callee-save frame is now where the method is, *m.
    uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);

    // Add space for the cookie.
    LayoutCookie(&sp8);

    return sp8;
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
                         HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
                         uint32_t** start_fpr)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    Walk(shorty, shorty_len);

    // JNI part.
    uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);

    sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;

  // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
  void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  uint32_t num_handle_scope_references_;
  const bool critical_native_;
};

uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
  num_handle_scope_references_++;
  return reinterpret_cast<uintptr_t>(nullptr);
}

void ComputeGenericJniFrameSize::WalkHeader(
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
  // The first 2 parameters are always excluded for @CriticalNative.
  if (UNLIKELY(critical_native_)) {
    return;
  }

  // JNIEnv*.
  sm->AdvancePointer(nullptr);

  // Class object or "this" as the first argument.
  sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
}
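
// Worked example (illustrative only): for a non-static, non-@CriticalNative method with
// shorty "ILF" (int return; Object and float arguments), the walk advances:
//   JNIEnv*         -> AdvancePointer      (WalkHeader)
//   jobject "this"  -> AdvanceHandleScope  (WalkHeader)
//   'L' argument    -> AdvanceHandleScope, which calls PushHandle() above and so counts one
//                      handle-scope reference
//   'F' argument    -> AdvanceFloat
// shorty[0] (the return type) is never walked, and for @CriticalNative the first two rows are
// skipped because WalkHeader() returns early.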

// Class to push values to three separate regions. Used to fill the native call part. Adheres to
// the template requirements of BuildGenericJniFrameStateMachine.
class FillNativeCall {
 public:
  FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
      cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}

  virtual ~FillNativeCall() {}

  void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
    cur_gpr_reg_ = gpr_regs;
    cur_fpr_reg_ = fpr_regs;
    cur_stack_arg_ = stack_args;
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    // Store the raw bits of the float; an arithmetic float-to-integer conversion would
    // mangle the value.
    *cur_fpr_reg_ = bit_cast<uint32_t, float>(val);
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

  virtual uintptr_t PushHandle(mirror::Object*) REQUIRES_SHARED(Locks::mutator_lock_) {
    LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
    UNREACHABLE();
  }

 private:
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
};
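
// Note (illustration, not authoritative): the three regions filled above correspond to the
// native calling convention's argument GPRs, argument FPRs, and out-going stack area; on arm64,
// for example, that would be x0-x7, v0-v7, and the area at the call-time sp. Because
// cur_fpr_reg_ is a uint32_t*, PushFpr8() advances it by two slots: one 64-bit FPR value
// occupies a pair of 32-bit halves in the buffer.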

// Visits arguments on the stack, placing them into a region lower down the stack for the benefit
// of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(Thread* self,
                              bool is_static,
                              bool critical_native,
                              const char* shorty,
                              uint32_t shorty_len,
                              ArtMethod*** sp)
      : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
        jni_call_(nullptr, nullptr, nullptr, nullptr, critical_native),
        sm_(&jni_call_) {
    ComputeGenericJniFrameSize fsc(critical_native);
    uintptr_t* start_gpr_reg;
    uint32_t* start_fpr_reg;
    uintptr_t* start_stack_arg;
    bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
                                             &handle_scope_,
                                             &start_stack_arg,
                                             &start_gpr_reg, &start_fpr_reg);

    jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);

    // The first 2 parameters are always excluded for @CriticalNative methods.
    if (LIKELY(!critical_native)) {
      // The JNI environment is always the first argument.
      sm_.AdvancePointer(self->GetJniEnv());

      if (is_static) {
        sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
      }  // else the "this" reference is already handled by QuickArgumentVisitor.
    }
  }

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeHandleScope(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

  StackReference<mirror::Object>* GetFirstHandleScopeEntry() {
    return handle_scope_->GetHandle(0).GetReference();
  }

  jobject GetFirstHandleScopeJObject() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).ToJObject();
  }

  void* GetBottomOfUsedArea() const {
    return bottom_of_used_area_;
  }

 private:
  // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
  class FillJniCall FINAL : public FillNativeCall {
   public:
    FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
                HandleScope* handle_scope, bool critical_native)
        : FillNativeCall(gpr_regs, fpr_regs, stack_args),
          handle_scope_(handle_scope),
          cur_entry_(0),
          critical_native_(critical_native) {}

    uintptr_t PushHandle(mirror::Object* ref) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);

    void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
      FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
      handle_scope_ = scope;
      cur_entry_ = 0U;
    }

    void ResetRemainingScopeSlots() REQUIRES_SHARED(Locks::mutator_lock_) {
      // Initialize padding entries.
      size_t expected_slots = handle_scope_->NumberOfReferences();
      while (cur_entry_ < expected_slots) {
        handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
      }

      if (!critical_native_) {
        // Non-critical natives have at least the self class (jclass) or this (jobject).
        DCHECK_NE(cur_entry_, 0U);
      }
    }

    bool CriticalNative() const {
      return critical_native_;
    }

   private:
    HandleScope* handle_scope_;
    size_t cur_entry_;
    const bool critical_native_;
  };

  HandleScope* handle_scope_;
  FillJniCall jni_call_;
  void* bottom_of_used_area_;

  BuildNativeCallFrameStateMachine<FillJniCall> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
  uintptr_t tmp;
  MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
  h.Assign(ref);
  tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
  cur_entry_++;
  return tmp;
}
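
// Note (illustration only): the jobject produced above is simply the address of the handle-scope
// slot holding the reference (or nullptr for a null reference), so the native code receives an
// indirect pointer that a moving GC can update in place.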

void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read the raw bits so that we don't cast to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}
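
// Note (illustration only): "split" longs/doubles arise on 32-bit targets when a 64-bit managed
// argument lands half in the last argument register and half on the stack;
// ReadSplitLongParam() reassembles the two 32-bit halves before the value is forwarded.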

void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
  // Clear out the rest of the scope.
  jni_call_.ResetRemainingScopeSlots();
  if (!jni_call_.CriticalNative()) {
    // Install the HandleScope.
    self->PushHandleScope(handle_scope_);
  }
}

#if defined(__arm__) || defined(__aarch64__)
extern "C" const void* artFindNativeMethod();
#else
extern "C" const void* artFindNativeMethod(Thread* self);
#endif

static uint64_t artQuickGenericJniEndJNIRef(Thread* self,
                                            uint32_t cookie,
                                            bool fast_native ATTRIBUTE_UNUSED,
                                            jobject l,
                                            jobject lock) {
  // TODO: add entrypoints for @FastNative returning objects.
  if (lock != nullptr) {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
  } else {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
  }
}

static void artQuickGenericJniEndJNINonRef(Thread* self,
                                           uint32_t cookie,
                                           bool fast_native,
                                           jobject lock) {
  if (lock != nullptr) {
    JniMethodEndSynchronized(cookie, lock, self);
    // Ignore "fast_native" here because synchronized functions aren't very fast.
  } else {
    if (UNLIKELY(fast_native)) {
      JniMethodFastEnd(cookie, self);
    } else {
      JniMethodEnd(cookie, self);
    }
  }
}

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a HandleScope and a call stack, and fill a mini stack with values to be pushed to
 * registers. The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this up, as the handle scope needs to go into the callee-save frame.
 *
 * The return value is a TwoWordReturn: on success it holds the bottom of the used alloca
 * region and the native code pointer to branch to; a failure value signals a pending
 * exception (see GetTwoWordFailureValue()).
 */
extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Note: We cannot walk the stack properly until fixed up below.
  ArtMethod* called = *sp;
  DCHECK(called->IsNative()) << called->PrettyMethod(true);
  Runtime* runtime = Runtime::Current();
  uint32_t shorty_len = 0;
  const char* shorty = called->GetShorty(&shorty_len);
  bool critical_native = called->IsCriticalNative();
  bool fast_native = called->IsFastNative();
  bool normal_native = !critical_native && !fast_native;

  // Run the visitor and update sp.
  BuildGenericJniFrameVisitor visitor(self,
                                      called->IsStatic(),
                                      critical_native,
                                      shorty,
                                      shorty_len,
                                      &sp);
  {
    ScopedAssertNoThreadSuspension sants(__FUNCTION__);
    visitor.VisitArguments();
    // FinalizeHandleScope pushes the handle scope on the thread.
    visitor.FinalizeHandleScope(self);
  }

  // Fix up managed-stack things in Thread. After this we can walk the stack.
  self->SetTopOfStackTagged(sp);

  self->VerifyStack();

  // We can now walk the stack if needed by JIT GC from MethodEntered() for JIT-on-first-use.
  jit::Jit* jit = runtime->GetJit();
  if (jit != nullptr) {
    jit->MethodEntered(self, called);
  }

  uint32_t cookie;
  uint32_t* sp32;
  // Skip calling JniMethodStart for @CriticalNative.
  if (LIKELY(!critical_native)) {
    // Start JNI, save the cookie.
    if (called->IsSynchronized()) {
      DCHECK(normal_native) << "@FastNative with synchronized is not supported";
      cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
      if (self->IsExceptionPending()) {
        self->PopHandleScope();
        // The failure value signals the pending exception.
        return GetTwoWordFailureValue();
      }
    } else {
      if (fast_native) {
        cookie = JniMethodFastStart(self);
      } else {
        DCHECK(normal_native);
        cookie = JniMethodStart(self);
      }
    }
    sp32 = reinterpret_cast<uint32_t*>(sp);
    *(sp32 - 1) = cookie;
  }
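  // Note (illustration only): the cookie is written into the 4 bytes immediately below the
  // relocated ArtMethod* slot, i.e. into the space LayoutCookie() reserved earlier;
  // artQuickGenericJniEndTrampoline below reads it back from the same place via (sp32 - 1).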

  // Retrieve the stored native code.
  void const* nativeCode = called->GetEntryPointFromJni();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
#if defined(__arm__) || defined(__aarch64__)
    nativeCode = artFindNativeMethod();
#else
    nativeCode = artFindNativeMethod(self);
#endif

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.

      // @CriticalNative calls do not need to call back into JniMethodEnd.
      if (LIKELY(!critical_native)) {
        // End JNI, as the assembly will move to deliver the exception.
        jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
        if (shorty[0] == 'L') {
          artQuickGenericJniEndJNIRef(self, cookie, fast_native, nullptr, lock);
        } else {
          artQuickGenericJniEndJNINonRef(self, cookie, fast_native, lock);
        }
      }

      return GetTwoWordFailureValue();
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

#if defined(__mips__) && !defined(__LP64__)
  // On MIPS32 if the first two arguments are floating-point, we need to know their types
  // so that art_quick_generic_jni_trampoline can correctly extract them from the stack
  // and load into floating-point registers.
  // Possible arrangements of first two floating-point arguments on the stack (32-bit FPU
  // view):
  // (1)
  //  | DOUBLE    | DOUBLE    | other args, if any
  //  |  F12      |  F13      |  F14      |  F15      |
  //  |  SP+0     |  SP+4     |  SP+8     |  SP+12    | SP+16
  // (2)
  //  | DOUBLE    | FLOAT     | (PAD)     | other args, if any
  //  |  F12      |  F13      |  F14      |           |
  //  |  SP+0     |  SP+4     |  SP+8     |  SP+12    | SP+16
  // (3)
  //  | FLOAT     | (PAD)     | DOUBLE    | other args, if any
  //  |  F12      |           |  F14      |  F15      |
  //  |  SP+0     |  SP+4     |  SP+8     |  SP+12    | SP+16
  // (4)
  //  | FLOAT     | FLOAT     | other args, if any
  //  |  F12      |  F14      |
  //  |  SP+0     |  SP+4     | SP+8
  // As you can see, only the last case (4) is special. In all others we can just
  // load F12/F13 and F14/F15 in the same manner.
  // Set bit 0 of the native code address to 1 in this case (valid code addresses
  // are always a multiple of 4 on MIPS32, so we have 2 spare bits available).
  if (nativeCode != nullptr &&
      shorty != nullptr &&
      shorty_len >= 3 &&
      shorty[1] == 'F' &&
      shorty[2] == 'F') {
    nativeCode = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(nativeCode) | 1);
  }
#endif

  // Return the native code addr (lo) and the bottom of the alloca address (hi).
  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
                                reinterpret_cast<uintptr_t>(nativeCode));
}

// Defined in quick_jni_entrypoints.cc.
extern uint64_t GenericJniMethodEnd(Thread* self, uint32_t saved_local_ref_cookie,
                                    jvalue result, uint64_t result_f, ArtMethod* called,
                                    HandleScope* handle_scope);
/*
 * Called after the native JNI code returns. Responsible for cleanup (handle scope, saved state)
 * and unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
                                                    jvalue result,
                                                    uint64_t result_f) {
  // We're here just back from a native call. We don't have the shared mutator lock at this point
  // yet until we call GoToRunnable() later in GenericJniMethodEnd(). Accessing objects or doing
  // anything that requires a mutator lock before that would cause problems as GC may have the
  // exclusive mutator lock and may be moving objects, etc.
  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK(self->GetManagedStack()->GetTopQuickFrameTag());
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  ArtMethod* called = *sp;
  uint32_t cookie = *(sp32 - 1);
  HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp) + sizeof(*sp));
  return GenericJniMethodEnd(self, cookie, result, result_f, called, table);
}

// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
// for the method pointer.
//
// It is valid to use this, as at the usage points here (returns from C functions) we are assumed
// to hold the mutator lock (see the REQUIRES_SHARED(Locks::mutator_lock_) annotations).
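
// Illustration only (the exact packing depends on the target): on 32-bit targets TwoWordReturn
// is a uint64_t and a success value is conceptually built as
//   return (static_cast<uint64_t>(hi) << 32) | lo;
// so the assembly stubs can pick the code pointer and the method/frame pointer out of the two
// return registers; on 64-bit targets a two-field struct returned in a register pair plays the
// same role.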

template <InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx,
                                     ObjPtr<mirror::Object> this_object,
                                     Thread* self,
                                     ArtMethod** sp) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  ArtMethod* method = FindMethodFast<type, access_check>(method_idx, this_object, caller_method);
  if (UNLIKELY(method == nullptr)) {
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx,
                                                      &this_object,
                                                      caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
                          << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please the analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                             \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                             \
  TwoWordReturn artInvokeCommon<type, access_check>(                                         \
      uint32_t method_idx, ObjPtr<mirror::Object> this_object, Thread* self, ArtMethod** sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
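
// For reference (illustration only), one of the lines above expands to an explicit
// instantiation such as:
//   template REQUIRES_SHARED(Locks::mutator_lock_)
//   TwoWordReturn artInvokeCommon<kVirtual, false>(
//       uint32_t method_idx, ObjPtr<mirror::Object> this_object, Thread* self, ArtMethod** sp);
// which forces the compiler to emit the template body for each (type, access_check) pair used
// by the trampolines below.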

// See comments in runtime_support_asm.S
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx,
    mirror::Object* this_object ATTRIBUTE_UNUSED,
    Thread* self,
    ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
  // For static calls, this_object is not required and may be random garbage. Don't pass it down
  // so that it doesn't trip the ObjPtr alignment check.
  return artInvokeCommon<kStatic, true>(method_idx, nullptr, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}

// Helper function for art_quick_imt_conflict_trampoline to look up the interface method.
extern "C" ArtMethod* artLookupResolvedMethod(uint32_t method_index, ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  DCHECK(!referrer->IsProxyMethod());
  ArtMethod* result = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
      method_index, referrer->GetDexCache(), referrer->GetClassLoader());
  DCHECK(result == nullptr ||
         result->GetDeclaringClass()->IsInterface() ||
         result->GetDeclaringClass() ==
             WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object))
      << result->PrettyMethod();
  return result;
}

// Determine target of interface dispatch. The interface method and this object are known non-null.
// The interface method is the method returned by the dex cache in the conflict trampoline.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method,
                                                      mirror::Object* raw_this_object,
                                                      Thread* self,
                                                      ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> this_object = hs.NewHandle(raw_this_object);
  Handle<mirror::Class> cls = hs.NewHandle(this_object->GetClass());

  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  ArtMethod* method = nullptr;
  ImTable* imt = cls->GetImt(kRuntimePointerSize);

  if (UNLIKELY(interface_method == nullptr)) {
    // The interface method is unresolved, so resolve it in the dex file of the caller.
    // Fetch the dex_method_idx of the target interface method from the caller.
    uint32_t dex_method_idx;
    uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
    const Instruction& instr = caller_method->DexInstructions().InstructionAt(dex_pc);
    Instruction::Code instr_code = instr.Opcode();
    DCHECK(instr_code == Instruction::INVOKE_INTERFACE ||
           instr_code == Instruction::INVOKE_INTERFACE_RANGE)
        << "Unexpected call into interface trampoline: " << instr.DumpString(nullptr);
    if (instr_code == Instruction::INVOKE_INTERFACE) {
      dex_method_idx = instr.VRegB_35c();
    } else {
      DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
      dex_method_idx = instr.VRegB_3rc();
    }

    const DexFile& dex_file = caller_method->GetDeclaringClass()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(dex_method_idx),
                                                  &shorty_len);
    {
      // Remember the args in case a GC happens in ClassLinker::ResolveMethod().
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      interface_method = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
          self, dex_method_idx, caller_method, kInterface);
      visitor.FixupReferences();
    }

    if (UNLIKELY(interface_method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }

  DCHECK(!interface_method->IsRuntimeMethod());
  // Look whether we have a match in the ImtConflictTable.
  uint32_t imt_index = ImTable::GetImtIndex(interface_method);
  ArtMethod* conflict_method = imt->Get(imt_index, kRuntimePointerSize);
  if (LIKELY(conflict_method->IsRuntimeMethod())) {
    ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
    DCHECK(current_table != nullptr);
    method = current_table->Lookup(interface_method, kRuntimePointerSize);
  } else {
    // It seems we aren't really a conflict method!
    if (kIsDebugBuild) {
      ArtMethod* m = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
      CHECK_EQ(conflict_method, m)
          << interface_method->PrettyMethod() << " / " << conflict_method->PrettyMethod()
          << " / " << ArtMethod::PrettyMethod(m) << " / " << cls->PrettyClass();
    }
    method = conflict_method;
  }
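  // Note (illustration only): an ImtConflictTable is a null-terminated array of
  // { interface_method, implementation } pairs, so the Lookup() above is a linear scan that
  // yields null when this interface method has not been seen at this IMT slot before.
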
  if (method != nullptr) {
    return GetTwoWordSuccessValue(
        reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCode()),
        reinterpret_cast<uintptr_t>(method));
  }

  // No match, use the IfTable.
  method = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
  if (UNLIKELY(method == nullptr)) {
    ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
        interface_method, this_object.Get(), caller_method);
    return GetTwoWordFailureValue();  // Failure.
  }

  // We arrive here if we have found an implementation, and it is not in the ImtConflictTable.
  // We create a new table with the new pair { interface_method, method }.
  DCHECK(conflict_method->IsRuntimeMethod());
  ArtMethod* new_conflict_method = Runtime::Current()->GetClassLinker()->AddMethodToConflictTable(
      cls.Get(),
      conflict_method,
      interface_method,
      method,
      /*force_new_conflict_method*/false);
  if (new_conflict_method != conflict_method) {
    // Update the IMT if we create a new conflict method. No fence needed here, as the
    // data is consistent.
    imt->Set(imt_index,
             new_conflict_method,
             kRuntimePointerSize);
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Returns a uint64_t representing the raw bits from a JValue.
extern "C" uint64_t artInvokePolymorphic(mirror::Object* raw_receiver, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK(raw_receiver != nullptr);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));

  // Start a new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Making stack arguments safe.");

  // From the instruction, get the |callsite_shorty| and expose arguments on the stack to the GC.
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
  const Instruction& inst = caller_method->DexInstructions().InstructionAt(dex_pc);
  DCHECK(inst.Opcode() == Instruction::INVOKE_POLYMORPHIC ||
         inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
  const dex::ProtoIndex proto_idx(inst.VRegH());
  const char* shorty = caller_method->GetDexFile()->GetShorty(proto_idx);
  const size_t shorty_length = strlen(shorty);
  static const bool kMethodIsStatic = false;  // invoke() and invokeExact() are not static.
  RememberForGcArgumentVisitor gc_visitor(sp, kMethodIsStatic, shorty, shorty_length, &soa);
  gc_visitor.VisitArguments();

  // Wrap raw_receiver in a Handle for safety.
  StackHandleScope<3> hs(self);
  Handle<mirror::Object> receiver_handle(hs.NewHandle(raw_receiver));
  raw_receiver = nullptr;
  self->EndAssertNoThreadSuspension(old_cause);

  // Resolve the method.
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
      self, inst.VRegB(), caller_method, kVirtual);

  Handle<mirror::MethodType> method_type(
      hs.NewHandle(linker->ResolveMethodType(self, proto_idx, caller_method)));
  if (UNLIKELY(method_type.IsNull())) {
    // This implies we couldn't resolve one or more types in this method handle.
    CHECK(self->IsExceptionPending());
    return 0UL;
  }

  DCHECK_EQ(ArtMethod::NumArgRegisters(shorty) + 1u, (uint32_t)inst.VRegA());
  DCHECK_EQ(resolved_method->IsStatic(), kMethodIsStatic);

  // Fix references before constructing the shadow frame.
  gc_visitor.FixupReferences();

  // Construct the shadow frame, placing arguments consecutively from |first_arg|.
  const bool is_range = (inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
  const size_t num_vregs = is_range ? inst.VRegA_4rcc() : inst.VRegA_45cc();
  const size_t first_arg = 0;
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_vregs, /* link */ nullptr, resolved_method, dex_pc);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  ScopedStackedShadowFramePusher
      frame_pusher(self, shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
  BuildQuickShadowFrameVisitor shadow_frame_builder(sp,
                                                    kMethodIsStatic,
                                                    shorty,
                                                    strlen(shorty),
                                                    shadow_frame,
                                                    first_arg);
  shadow_frame_builder.VisitArguments();

  // Push a transition back into managed code onto the linked list in the thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);

  // The shadow frame holds the argument registers in consecutive order, so invoke with range
  // operands; vreg 0 holds the receiver (the MethodHandle or VarHandle), which is consumed
  // separately below, hence the +1/-1 adjustment.
  RangeInstructionOperands operands(first_arg + 1, num_vregs - 1);
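  // Worked example (illustrative only): for MethodHandle.invokeExact(int, long) the call-site
  // shorty is "VIJ" plus the implicit receiver, so num_vregs is 4 (receiver + int + two vregs
  // for the long) and |operands| covers vregs 1..3.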
  Intrinsics intrinsic = static_cast<Intrinsics>(resolved_method->GetIntrinsic());
  JValue result;
  bool success = false;
  if (resolved_method->GetDeclaringClass() == GetClassRoot<mirror::MethodHandle>(linker)) {
    Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
        ObjPtr<mirror::MethodHandle>::DownCast(MakeObjPtr(receiver_handle.Get()))));
    if (intrinsic == Intrinsics::kMethodHandleInvokeExact) {
      success = MethodHandleInvokeExact(self,
                                        *shadow_frame,
                                        method_handle,
                                        method_type,
                                        &operands,
                                        &result);
    } else {
      DCHECK_EQ(static_cast<uint32_t>(intrinsic),
                static_cast<uint32_t>(Intrinsics::kMethodHandleInvoke));
      success = MethodHandleInvoke(self,
                                   *shadow_frame,
                                   method_handle,
                                   method_type,
                                   &operands,
                                   &result);
    }
  } else {
    DCHECK_EQ(GetClassRoot<mirror::VarHandle>(linker), resolved_method->GetDeclaringClass());
    Handle<mirror::VarHandle> var_handle(hs.NewHandle(
        ObjPtr<mirror::VarHandle>::DownCast(MakeObjPtr(receiver_handle.Get()))));
    mirror::VarHandle::AccessMode access_mode =
        mirror::VarHandle::GetAccessModeByIntrinsic(intrinsic);
    success = VarHandleInvokeAccessor(self,
                                      *shadow_frame,
                                      var_handle,
                                      method_type,
                                      access_mode,
                                      &operands,
                                      &result);
  }

  DCHECK(success || self->IsExceptionPending());

  // Pop the transition record.
  self->PopManagedStackFragment(fragment);

  return result.GetJ();
}
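
// Note (illustration only): the raw uint64_t returned above, and by artInvokeCustom below, is
// the JValue's bit pattern; the assembly stub and the managed caller reinterpret it according
// to the return type in the call-site shorty, so no type information needs to travel with the
// value.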

// Returns a uint64_t representing the raw bits from a JValue.
extern "C" uint64_t artInvokeCustom(uint32_t call_site_idx, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));

  // invoke-custom is effectively a static call (no receiver).
  static constexpr bool kMethodIsStatic = true;

  // Start a new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);

  const char* old_cause = self->StartAssertNoThreadSuspension("Making stack arguments safe.");

  // From the instruction, get the |callsite_shorty| and expose arguments on the stack to the GC.
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
  const DexFile* dex_file = caller_method->GetDexFile();
  const dex::ProtoIndex proto_idx(dex_file->GetProtoIndexForCallSite(call_site_idx));
  const char* shorty = caller_method->GetDexFile()->GetShorty(proto_idx);
  const uint32_t shorty_len = strlen(shorty);

  // Construct the shadow frame, placing arguments consecutively from |first_arg|.
  const size_t first_arg = 0;
  const size_t num_vregs = ArtMethod::NumArgRegisters(shorty);
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_vregs, /* link */ nullptr, caller_method, dex_pc);
  ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
  ScopedStackedShadowFramePusher
      frame_pusher(self, shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
  BuildQuickShadowFrameVisitor shadow_frame_builder(sp,
                                                    kMethodIsStatic,
                                                    shorty,
                                                    shorty_len,
                                                    shadow_frame,
                                                    first_arg);
  shadow_frame_builder.VisitArguments();

  // Push a transition back into managed code onto the linked list in the thread.
  ManagedStack fragment;
  self->PushManagedStackFragment(&fragment);
  self->EndAssertNoThreadSuspension(old_cause);

  // Perform the invoke-custom operation.
  RangeInstructionOperands operands(first_arg, num_vregs);
  JValue result;
  bool success =
      interpreter::DoInvokeCustom(self, *shadow_frame, call_site_idx, &operands, &result);
  DCHECK(success || self->IsExceptionPending());

  // Pop the transition record.
  self->PopManagedStackFragment(fragment);

  return result.GetJ();
}

}  // namespace art