/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "method_reference.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
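  // Each architecture block below defines the same set of constants: how many arguments are
  // passed in GPRs/FPRs, where the first GPR/FPR argument and the return address are spilled
  // within the RefsAndArgs callee-save frame, and how long/double pairs are aligned or split.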
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // |  :         |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kAlignPairRegister = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSoftFloatAbi = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = !kArm32QuickCodeUseSoftFloat;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = kArm32QuickCodeUseSoftFloat ? 0 : 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // |  :         |
  // | X20        |
  // | X7         |
  // |  :         |
  // | X1         |
  // | D7         |
  // |  :         |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = true;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 16;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | F7         |    f_arg7
  // | F6         |    f_arg6
  // | F5         |    f_arg5
  // | F4         |    f_arg4
  // | F3         |    f_arg3
  // | F2         |    f_arg2
  // | F1         |    f_arg1
  // | F0         |    f_arg0
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for Mips64, when A0 is skipped, F0 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;  // GPRs are 64-bit, pairs never split.
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  // GPR and FPR argument registers are assigned in lockstep on Mips64: consuming a register of
  // one class also advances the index of the other class (see kGprFprLockstep below).
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
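  // On x86-64 the argument GPRs (RSI, RDX, RCX, R8, R9; RDI holds the Method*) are interleaved
  // with callee saves in the frame above and are not spilled in argument order, so map the
  // logical argument index onto its spill slot.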
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static mirror::Object* GetProxyThisObject(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    CHECK(sp->AsMirrorPtr()->IsProxyMethod());
    CHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize, sp->AsMirrorPtr()->GetFrameSizeInBytes());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr();
  }

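  // For the given quick ref-and-args frame, return the caller's ArtMethod*. The caller's frame
  // starts right above this callee-save frame, with its own StackReference<ArtMethod> first.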
  static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    uint8_t* previous_sp = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
  }

  // For the given quick ref-and-args frame, return the caller's PC.
  static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

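  // Builds a visitor over the RefsAndArgs callee-save frame at 'sp' for a method with the given
  // shorty; subclasses implement Visit() to consume each argument in turn.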
  QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(StackReference<mirror::ArtMethod>)),  // Skip StackReference<ArtMethod>.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For double-register alignment, fpr_double_index_ must always be able to stay even, so the
    // number of FPR argument registers has to be even when back-filling is used.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

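  // Returns the address of the current parameter: either its spill slot in the callee-save frame
  // (GPR or FPR area) or its location in the caller's out-args area on the stack.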
  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

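  // True if the current 64-bit parameter straddles the last argument register and the first
  // stack slot; this can only happen when register spill slots are 32 bits wide.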
  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) || (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

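  // Advance the GPR argument index; on architectures where GPRs and FPRs are assigned in
  // lockstep (kGprFprLockstep), consuming a GPR also consumes the matching FPR, and vice versa.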
  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the first method argument.
    // (b) Whatever the argument type, 'stack_index_' must be advanced for every argument visited.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle the 'this' argument.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong && kAlignPairRegister && gpr_index_ == 0) {
              // Currently, this is only for ARM, where the first available parameter register
              // is R1. So we skip it, and use R2 instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;  // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;  // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed
  // in terms of singles; may be behind fpr_index_.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// allows using the QuickArgumentVisitor constants without moving all the code into its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObject(sp);
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
                               size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

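// Bridge taken when a method has no usable compiled code: builds a shadow frame from the quick
// arguments, ensures the declaring class is initialized for static methods, and runs the method
// in the interpreter, returning the result bits as a uint64_t.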
extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    // No last shadow coming from quick.
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(shadow_frame->GetMethod());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }
    JValue result = interpreter::EnterInterpreterFromEntryPoint(self, code_item, shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                            const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fix up any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place arguments into the args vector and remove the receiver.
  mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy();
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len,
                               ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fix up any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  // Start new JNI local reference state
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs)
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  MethodReference called_method(nullptr, 0);
  const bool called_method_known_on_entry = !called->IsRuntimeMethod();
  if (!called_method_known_on_entry) {
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    called_method.dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
        UNREACHABLE();
    }
    called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    called_method.dex_file = called->GetDexFile();
    called_method.dex_method_index = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      called_method.dex_file->GetMethodShorty(
          called_method.dex_file->GetMethodId(called_method.dex_method_index), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (!called_method_known_on_entry) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
    called = linker->ResolveMethod(self, called_method.dex_method_index, &caller, invoke_type);
  }
  const void* code = nullptr;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;

      mirror::ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method, avoiding dirtying the dex cache if possible.
      // Note, called_method.dex_method_index references the dex method before the
      // FindVirtualMethodFor... This is ok for FindDexMethodIndexInOtherDexFile that only cares
      // about the name and signature.
      uint32_t update_dex_cache_method_index = called->GetDexMethodIndex();
      if (!called->HasSameDexCacheResolvedMethods(caller)) {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK(!called_method_known_on_entry);
        DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
        const DexFile* caller_dex_file = called_method.dex_file;
        uint32_t caller_method_name_and_sig_index = called_method.dex_method_index;
        update_dex_cache_method_index =
            called->FindDexMethodIndexInOtherDexFile(*caller_dex_file,
                                                     caller_method_name_and_sig_index);
      }
      if ((update_dex_cache_method_index != DexFile::kDexNoIndex) &&
          (caller->GetDexCacheResolvedMethod(update_dex_cache_method_index) != called)) {
        caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called);
      }
    } else if (invoke_type == kStatic) {
      const auto called_dex_method_idx = called->GetDexMethodIndex();
      // For static invokes, we may dispatch to the static method in the superclass but resolve
      // using the subclass. To prevent getting slow paths on each invoke, we force set the
      // resolved method for the super class dex method index if we are in the same dex file.
      // b/19175856
      if (called->GetDexFile() == called_method.dex_file &&
          called_method.dex_method_index != called_dex_method_idx) {
        called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called);
      }
    }
    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(soa.Self(), called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == nullptr, self->IsExceptionPending());
  // Fix up any locally saved objects that may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  sp->Assign(called);
  return code;
}

Andreas Gampec147b002014-03-06 18:11:06 -0800981/*
982 * This class uses a couple of observations to unite the different calling conventions through
983 * a few constants.
984 *
985 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
986 * possible alignment.
987 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
988 * types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
989 * when we have to split things
990 *    when we have to split things.
991 * and we can use Int handling directly.
992 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
993 * necessary when widening. Also, widening of Ints will take place implicitly, and the
994 * extension should be compatible with Aarch64, which mandates copying the available bits
995 * into LSB and leaving the rest unspecified.
996 * 5) Aligning longs and doubles is only necessary on the 32-bit targets that use register pairs
997 *    (arm, mips), and it's the same in registers and on the stack.
998 * 6) There is only little endian.
999 *
1000 *
1001 * Actual work is supposed to be done in a delegate of the template type. The interface is as
1002 * follows:
1003 *
1004 * void PushGpr(uintptr_t): Add a value for the next GPR
1005 *
1006 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
1007 *                     padding, that is, when the architecture is 32b and aligns 64b values.
1008 *
1009 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
1010 * split this if necessary. The current state will have aligned, if
1011 * necessary.
1012 *
1013 * void PushStack(uintptr_t): Push a value to the stack.
1014 *
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001015 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be called with nullptr,
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001016 * as this might be important for null initialization.
Andreas Gampec147b002014-03-06 18:11:06 -08001017 * Must return the jobject, that is, the reference to the
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001018 * entry in the HandleScope (nullptr if necessary).
Andreas Gampec147b002014-03-06 18:11:06 -08001019 *
1020 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001021template<class T> class BuildNativeCallFrameStateMachine {
Andreas Gampec147b002014-03-06 18:11:06 -08001022 public:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001023#if defined(__arm__)
1024 // TODO: These are all dummy values!
Andreas Gampec147b002014-03-06 18:11:06 -08001025 static constexpr bool kNativeSoftFloatAbi = true;
1026 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs, r0-r3
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001027 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
1028
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001029 static constexpr size_t kRegistersNeededForLong = 2;
1030 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001031 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001032 static constexpr bool kMultiFPRegistersWidened = false;
1033 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001034 static constexpr bool kAlignLongOnStack = true;
1035 static constexpr bool kAlignDoubleOnStack = true;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001036#elif defined(__aarch64__)
1037 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1038  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
1039 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1040
1041 static constexpr size_t kRegistersNeededForLong = 1;
1042 static constexpr size_t kRegistersNeededForDouble = 1;
1043 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001044 static constexpr bool kMultiFPRegistersWidened = false;
1045 static constexpr bool kMultiGPRegistersWidened = false;
Stuart Monteithb95a5342014-03-12 13:32:32 +00001046 static constexpr bool kAlignLongOnStack = false;
1047 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001048#elif defined(__mips__) && !defined(__LP64__)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001049  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
Douglas Leung735b8552014-10-31 12:21:40 -07001050 static constexpr size_t kNumNativeGprArgs = 4; // 4 arguments passed in GPRs.
1051 static constexpr size_t kNumNativeFprArgs = 0; // 0 arguments passed in FPRs.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001052
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001053 static constexpr size_t kRegistersNeededForLong = 2;
1054 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec147b002014-03-06 18:11:06 -08001055 static constexpr bool kMultiRegistersAligned = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001056 static constexpr bool kMultiFPRegistersWidened = true;
1057 static constexpr bool kMultiGPRegistersWidened = false;
Douglas Leung735b8552014-10-31 12:21:40 -07001058 static constexpr bool kAlignLongOnStack = true;
1059 static constexpr bool kAlignDoubleOnStack = true;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001060#elif defined(__mips__) && defined(__LP64__)
1061  // Let the code prepare GPRs only and we will load the FPRs with the same data.
1062 static constexpr bool kNativeSoftFloatAbi = true;
1063 static constexpr size_t kNumNativeGprArgs = 8;
1064 static constexpr size_t kNumNativeFprArgs = 0;
1065
1066 static constexpr size_t kRegistersNeededForLong = 1;
1067 static constexpr size_t kRegistersNeededForDouble = 1;
1068 static constexpr bool kMultiRegistersAligned = false;
1069 static constexpr bool kMultiFPRegistersWidened = false;
1070 static constexpr bool kMultiGPRegistersWidened = true;
1071 static constexpr bool kAlignLongOnStack = false;
1072 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001073#elif defined(__i386__)
1074 // TODO: Check these!
Andreas Gampec147b002014-03-06 18:11:06 -08001075 static constexpr bool kNativeSoftFloatAbi = false; // Not using int registers for fp
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001076  static constexpr size_t kNumNativeGprArgs = 0;  // No arguments passed in GPRs.
1077  static constexpr size_t kNumNativeFprArgs = 0;  // No arguments passed in FPRs.
1078
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001079 static constexpr size_t kRegistersNeededForLong = 2;
1080 static constexpr size_t kRegistersNeededForDouble = 2;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001081 static constexpr bool kMultiRegistersAligned = false; // x86 not using regs, anyways
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001082 static constexpr bool kMultiFPRegistersWidened = false;
1083 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001084 static constexpr bool kAlignLongOnStack = false;
1085 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001086#elif defined(__x86_64__)
1087 static constexpr bool kNativeSoftFloatAbi = false; // This is a hard float ABI.
1088 static constexpr size_t kNumNativeGprArgs = 6; // 6 arguments passed in GPRs.
1089 static constexpr size_t kNumNativeFprArgs = 8; // 8 arguments passed in FPRs.
1090
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001091 static constexpr size_t kRegistersNeededForLong = 1;
1092 static constexpr size_t kRegistersNeededForDouble = 1;
Andreas Gampec147b002014-03-06 18:11:06 -08001093 static constexpr bool kMultiRegistersAligned = false;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001094 static constexpr bool kMultiFPRegistersWidened = false;
1095 static constexpr bool kMultiGPRegistersWidened = false;
Andreas Gampec147b002014-03-06 18:11:06 -08001096 static constexpr bool kAlignLongOnStack = false;
1097 static constexpr bool kAlignDoubleOnStack = false;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001098#else
1099#error "Unsupported architecture"
1100#endif
1101
Andreas Gampec147b002014-03-06 18:11:06 -08001102 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001103 explicit BuildNativeCallFrameStateMachine(T* delegate)
1104 : gpr_index_(kNumNativeGprArgs),
1105 fpr_index_(kNumNativeFprArgs),
1106 stack_entries_(0),
1107 delegate_(delegate) {
Andreas Gampec147b002014-03-06 18:11:06 -08001108 // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
1109 // the next register is even; counting down is just to make the compiler happy...
Andreas Gampe575e78c2014-11-03 23:41:03 -08001110 static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
1111 static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
Andreas Gampec147b002014-03-06 18:11:06 -08001112 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001113
Andreas Gampec200a4a2014-06-16 18:39:09 -07001114 virtual ~BuildNativeCallFrameStateMachine() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001115
Ian Rogers1428dce2014-10-21 15:02:15 -07001116 bool HavePointerGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001117 return gpr_index_ > 0;
1118 }
1119
Andreas Gampec200a4a2014-06-16 18:39:09 -07001120 void AdvancePointer(const void* val) {
Andreas Gampec147b002014-03-06 18:11:06 -08001121 if (HavePointerGpr()) {
1122 gpr_index_--;
1123 PushGpr(reinterpret_cast<uintptr_t>(val));
1124 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001125 stack_entries_++; // TODO: have a field for pointer length as multiple of 32b
Andreas Gampec147b002014-03-06 18:11:06 -08001126 PushStack(reinterpret_cast<uintptr_t>(val));
1127 gpr_index_ = 0;
1128 }
1129 }
1130
Ian Rogers1428dce2014-10-21 15:02:15 -07001131 bool HaveHandleScopeGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001132 return gpr_index_ > 0;
1133 }
1134
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001135 void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1136 uintptr_t handle = PushHandle(ptr);
1137 if (HaveHandleScopeGpr()) {
Andreas Gampec147b002014-03-06 18:11:06 -08001138 gpr_index_--;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001139 PushGpr(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001140 } else {
1141 stack_entries_++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001142 PushStack(handle);
Andreas Gampec147b002014-03-06 18:11:06 -08001143 gpr_index_ = 0;
1144 }
1145 }
1146
Ian Rogers1428dce2014-10-21 15:02:15 -07001147 bool HaveIntGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001148 return gpr_index_ > 0;
1149 }
1150
1151 void AdvanceInt(uint32_t val) {
1152 if (HaveIntGpr()) {
1153 gpr_index_--;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001154 if (kMultiGPRegistersWidened) {
1155 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
1156 PushGpr(static_cast<int64_t>(bit_cast<uint32_t, int32_t>(val)));
1157 } else {
1158 PushGpr(val);
1159 }
Andreas Gampec147b002014-03-06 18:11:06 -08001160 } else {
1161 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001162 if (kMultiGPRegistersWidened) {
1163 DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
1164 PushStack(static_cast<int64_t>(bit_cast<uint32_t, int32_t>(val)));
1165 } else {
1166 PushStack(val);
1167 }
Andreas Gampec147b002014-03-06 18:11:06 -08001168 gpr_index_ = 0;
1169 }
1170 }
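
  // Illustrative trace (not from the original source): with the mips64 configuration above,
  // where kMultiGPRegistersWidened is true and GPR slots are 64 bits wide, a 32-bit int is
  // sign-extended into the full slot. The state machine "sm" and the value are hypothetical.
  //
  //   sm.AdvanceInt(0xFFFFFFFFu);  // The bits of int32_t -1.
  //   // bit_cast<uint32_t, int32_t>() reinterprets the bits as -1 and static_cast<int64_t>()
  //   // sign-extends, so the GPR slot receives 0xFFFFFFFFFFFFFFFF rather than 0x00000000FFFFFFFF.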
1171
Ian Rogers1428dce2014-10-21 15:02:15 -07001172 bool HaveLongGpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001173 return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
1174 }
1175
Ian Rogers1428dce2014-10-21 15:02:15 -07001176 bool LongGprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001177 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1178 kAlignLongOnStack && // and when it needs alignment
1179 (gpr_index_ & 1) == 1; // counter is odd, see constructor
1180 }
1181
Ian Rogers1428dce2014-10-21 15:02:15 -07001182 bool LongStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001183 return kRegistersNeededForLong > 1 && // only pad when using multiple registers
1184 kAlignLongOnStack && // and when it needs 8B alignment
1185 (stack_entries_ & 1) == 1; // counter is odd
1186 }
1187
1188 void AdvanceLong(uint64_t val) {
1189 if (HaveLongGpr()) {
1190 if (LongGprNeedsPadding()) {
1191 PushGpr(0);
1192 gpr_index_--;
1193 }
1194 if (kRegistersNeededForLong == 1) {
1195 PushGpr(static_cast<uintptr_t>(val));
1196 } else {
1197 PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1198 PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1199 }
1200 gpr_index_ -= kRegistersNeededForLong;
1201 } else {
1202 if (LongStackNeedsPadding()) {
1203 PushStack(0);
1204 stack_entries_++;
1205 }
1206 if (kRegistersNeededForLong == 1) {
1207 PushStack(static_cast<uintptr_t>(val));
1208 stack_entries_++;
1209 } else {
1210 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1211 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1212 stack_entries_ += 2;
1213 }
1214 gpr_index_ = 0;
1215 }
1216 }
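
  // Illustrative trace (not from the original source): with the 32-bit arm configuration above
  // (kNumNativeGprArgs == 4, kRegistersNeededForLong == 2, kAlignLongOnStack == true), a long
  // following a single int is aligned to an even register pair. The state machine "sm" and the
  // argument values are hypothetical.
  //
  //   sm.AdvanceInt(1);                              // gpr_index_ 4 -> 3 (r0 used).
  //   sm.AdvanceLong(UINT64_C(0x1122334455667788));
  //   // gpr_index_ is odd, so LongGprNeedsPadding() holds: r1 is skipped via PushGpr(0),
  //   // the low word goes to r2, the high word to r3, and gpr_index_ ends up at 0.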
1217
Ian Rogers1428dce2014-10-21 15:02:15 -07001218 bool HaveFloatFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001219 return fpr_index_ > 0;
1220 }
1221
Andreas Gampec147b002014-03-06 18:11:06 -08001222 void AdvanceFloat(float val) {
1223 if (kNativeSoftFloatAbi) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001224 AdvanceInt(bit_cast<float, uint32_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001225 } else {
1226 if (HaveFloatFpr()) {
1227 fpr_index_--;
1228 if (kRegistersNeededForDouble == 1) {
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001229 if (kMultiFPRegistersWidened) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001230 PushFpr8(bit_cast<double, uint64_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001231 } else {
1232 // No widening, just use the bits.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001233 PushFpr8(bit_cast<float, uint64_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001234 }
1235 } else {
1236 PushFpr4(val);
1237 }
1238 } else {
1239 stack_entries_++;
Andreas Gampe1a5c4062015-01-15 12:10:47 -08001240 if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
Andreas Gampec147b002014-03-06 18:11:06 -08001241 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001242 // Note: We need to jump through those hoops to make the compiler happy.
1243 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
1244 PushStack(static_cast<uintptr_t>(bit_cast<double, uint64_t>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001245 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001246 PushStack(bit_cast<float, uintptr_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001247 }
1248 fpr_index_ = 0;
1249 }
1250 }
1251 }
1252
Ian Rogers1428dce2014-10-21 15:02:15 -07001253 bool HaveDoubleFpr() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001254 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1255 }
1256
Ian Rogers1428dce2014-10-21 15:02:15 -07001257 bool DoubleFprNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001258 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1259 kAlignDoubleOnStack && // and when it needs alignment
1260 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1261 }
1262
Ian Rogers1428dce2014-10-21 15:02:15 -07001263 bool DoubleStackNeedsPadding() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001264 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1265 kAlignDoubleOnStack && // and when it needs 8B alignment
1266 (stack_entries_ & 1) == 1; // counter is odd
1267 }
1268
1269 void AdvanceDouble(uint64_t val) {
1270 if (kNativeSoftFloatAbi) {
1271 AdvanceLong(val);
1272 } else {
1273 if (HaveDoubleFpr()) {
1274 if (DoubleFprNeedsPadding()) {
1275 PushFpr4(0);
1276 fpr_index_--;
1277 }
1278 PushFpr8(val);
1279 fpr_index_ -= kRegistersNeededForDouble;
1280 } else {
1281 if (DoubleStackNeedsPadding()) {
1282 PushStack(0);
1283 stack_entries_++;
1284 }
1285 if (kRegistersNeededForDouble == 1) {
1286 PushStack(static_cast<uintptr_t>(val));
1287 stack_entries_++;
1288 } else {
1289 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1290 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1291 stack_entries_ += 2;
1292 }
1293 fpr_index_ = 0;
1294 }
1295 }
1296 }
1297
Ian Rogers1428dce2014-10-21 15:02:15 -07001298 uint32_t GetStackEntries() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001299 return stack_entries_;
1300 }
1301
Ian Rogers1428dce2014-10-21 15:02:15 -07001302 uint32_t GetNumberOfUsedGprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001303 return kNumNativeGprArgs - gpr_index_;
1304 }
1305
Ian Rogers1428dce2014-10-21 15:02:15 -07001306 uint32_t GetNumberOfUsedFprs() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001307 return kNumNativeFprArgs - fpr_index_;
1308 }
1309
1310 private:
1311 void PushGpr(uintptr_t val) {
1312 delegate_->PushGpr(val);
1313 }
1314 void PushFpr4(float val) {
1315 delegate_->PushFpr4(val);
1316 }
1317 void PushFpr8(uint64_t val) {
1318 delegate_->PushFpr8(val);
1319 }
1320 void PushStack(uintptr_t val) {
1321 delegate_->PushStack(val);
1322 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001323 uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1324 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001325 }
1326
1327 uint32_t gpr_index_; // Number of free GPRs
1328 uint32_t fpr_index_; // Number of free FPRs
1329 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1330 // extended
Ian Rogers1428dce2014-10-21 15:02:15 -07001331 T* const delegate_; // What Push implementation gets called
Andreas Gampec147b002014-03-06 18:11:06 -08001332};
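
// For illustration only: a minimal delegate type satisfying the interface documented above
// BuildNativeCallFrameStateMachine could look like the sketch below. The name CountingDelegate
// and its behavior (merely counting pushes) are hypothetical; the real delegates in this file
// are ComputeNativeCallFrameSize and FillNativeCall further down.
//
//   class CountingDelegate {
//    public:
//     void PushGpr(uintptr_t) { gprs_++; }
//     void PushFpr4(float) { fprs_++; }
//     void PushFpr8(uint64_t) { fprs_ += 2; }
//     void PushStack(uintptr_t) { stack_++; }
//     uintptr_t PushHandle(mirror::Object*) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//       handles_++;
//       return reinterpret_cast<uintptr_t>(nullptr);
//     }
//    private:
//     size_t gprs_ = 0, fprs_ = 0, stack_ = 0, handles_ = 0;
//   };
//
//   // Usage: BuildNativeCallFrameStateMachine<CountingDelegate> sm(&delegate);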
1333
Andreas Gampec200a4a2014-06-16 18:39:09 -07001334// Computes the sizes of register stacks and call stack area. Handling of references can be extended
1335// in subclasses.
1336//
1337// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1338// them with handles.
1339class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001340 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001341 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1342
1343 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001344
Ian Rogers1428dce2014-10-21 15:02:15 -07001345 uint32_t GetStackSize() const {
Andreas Gampec147b002014-03-06 18:11:06 -08001346 return num_stack_entries_ * sizeof(uintptr_t);
1347 }
1348
Ian Rogers1428dce2014-10-21 15:02:15 -07001349 uint8_t* LayoutCallStack(uint8_t* sp8) const {
Andreas Gampec147b002014-03-06 18:11:06 -08001350 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001351 // Align by kStackAlignment.
1352 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001353 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001354 }
1355
Ian Rogers1428dce2014-10-21 15:02:15 -07001356 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
1357 const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001358 // Assumption is OK right now, as we have soft-float arm
1359 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1360 sp8 -= fregs * sizeof(uintptr_t);
1361 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1362 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1363 sp8 -= iregs * sizeof(uintptr_t);
1364 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1365 return sp8;
1366 }
Andreas Gampec147b002014-03-06 18:11:06 -08001367
Andreas Gampec200a4a2014-06-16 18:39:09 -07001368 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
Ian Rogers1428dce2014-10-21 15:02:15 -07001369 uint32_t** start_fpr) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001370 // Native call stack.
1371 sp8 = LayoutCallStack(sp8);
1372 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001373
Andreas Gampec200a4a2014-06-16 18:39:09 -07001374 // Put fprs and gprs below.
1375 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001376
Andreas Gampec200a4a2014-06-16 18:39:09 -07001377 // Return the new bottom.
1378 return sp8;
1379 }
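
  // Illustrative arithmetic (hypothetical numbers, assuming a 16-byte kStackAlignment and an
  // x86-64-like configuration with 6 GPR and 8 FPR argument slots): starting from sp8 = 0x1000
  // with 3 out-of-register stack entries,
  //
  //   LayoutCallStack:          0x1000 - 3 * 8 = 0xfe8, rounded down to 0xfe0  (*start_stack)
  //   LayoutCallRegisterStacks: 0xfe0  - 8 * 8 = 0xfa0                         (*start_fpr)
  //                             0xfa0  - 6 * 8 = 0xf70                         (*start_gpr)
  //
  // and 0xf70 is returned as the new bottom.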
1380
1381 virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001382 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1383 UNUSED(sm);
1384 }
Andreas Gampec200a4a2014-06-16 18:39:09 -07001385
1386 void Walk(const char* shorty, uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1387 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1388
1389 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001390
1391 for (uint32_t i = 1; i < shorty_len; ++i) {
1392 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1393 switch (cur_type_) {
1394 case Primitive::kPrimNot:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001395 // TODO: fix abuse of mirror types.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001396 sm.AdvanceHandleScope(
1397 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001398 break;
1399
1400 case Primitive::kPrimBoolean:
1401 case Primitive::kPrimByte:
1402 case Primitive::kPrimChar:
1403 case Primitive::kPrimShort:
1404 case Primitive::kPrimInt:
1405 sm.AdvanceInt(0);
1406 break;
1407 case Primitive::kPrimFloat:
1408 sm.AdvanceFloat(0);
1409 break;
1410 case Primitive::kPrimDouble:
1411 sm.AdvanceDouble(0);
1412 break;
1413 case Primitive::kPrimLong:
1414 sm.AdvanceLong(0);
1415 break;
1416 default:
1417 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001418 UNREACHABLE();
Andreas Gampec147b002014-03-06 18:11:06 -08001419 }
1420 }
1421
Ian Rogers1428dce2014-10-21 15:02:15 -07001422 num_stack_entries_ = sm.GetStackEntries();
Andreas Gampec147b002014-03-06 18:11:06 -08001423 }
1424
1425 void PushGpr(uintptr_t /* val */) {
1426 // not optimizing registers, yet
1427 }
1428
1429 void PushFpr4(float /* val */) {
1430 // not optimizing registers, yet
1431 }
1432
1433 void PushFpr8(uint64_t /* val */) {
1434 // not optimizing registers, yet
1435 }
1436
1437 void PushStack(uintptr_t /* val */) {
1438 // counting is already done in the superclass
1439 }
1440
Andreas Gampec200a4a2014-06-16 18:39:09 -07001441 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08001442 return reinterpret_cast<uintptr_t>(nullptr);
1443 }
1444
Andreas Gampec200a4a2014-06-16 18:39:09 -07001445 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08001446 uint32_t num_stack_entries_;
1447};
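
// For illustration only: computing the out-of-register argument space for a hypothetical native
// method whose shorty is "VIJD" (void return; int, long and double arguments) could look like the
// sketch below. The shorty string is made up for the example; in the runtime it comes from the
// called method.
//
//   ComputeNativeCallFrameSize fsc;
//   fsc.Walk("VIJD", 4u);                          // Index 0 (the return type) is skipped.
//   uint32_t out_args_bytes = fsc.GetStackSize();  // Stack entries * sizeof(uintptr_t).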
1448
Andreas Gampec200a4a2014-06-16 18:39:09 -07001449class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001450 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001451 ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001452
Andreas Gampec200a4a2014-06-16 18:39:09 -07001453  // Lays out the callee-save frame. Assumes that the not-yet-fixed-up frame corresponding to
1454  // RefsAndArgs is at *m = sp. Will update *m to point to the bottom of the save frame.
1455  //
1456  // Note: assumes Walk() has been run before, so that num_handle_scope_references_ is known.
Ian Rogers59c07062014-10-10 13:03:39 -07001457 void LayoutCalleeSaveFrame(Thread* self, StackReference<mirror::ArtMethod>** m, void* sp,
1458 HandleScope** handle_scope)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001459 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1460 mirror::ArtMethod* method = (*m)->AsMirrorPtr();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001461
Andreas Gampec200a4a2014-06-16 18:39:09 -07001462 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
1463
1464 // First, fix up the layout of the callee-save frame.
1465 // We have to squeeze in the HandleScope, and relocate the method pointer.
1466
1467 // "Free" the slot for the method.
Ian Rogers13735952014-10-08 12:43:28 -07001468 sp8 += sizeof(void*); // In the callee-save frame we use a full pointer.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001469
1470 // Under the callee saves put handle scope and new method stack reference.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001471 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
1472 size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>);
1473
1474 sp8 -= scope_and_method;
1475 // Align by kStackAlignment.
1476 sp8 = reinterpret_cast<uint8_t*>(RoundDown(
1477 reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
1478
1479 uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>);
Ian Rogers59c07062014-10-10 13:03:39 -07001480 *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
1481 num_handle_scope_references_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001482
1483 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1484 uint8_t* method_pointer = sp8;
1485 StackReference<mirror::ArtMethod>* new_method_ref =
1486 reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer);
1487 new_method_ref->Assign(method);
1488 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001489 }
1490
Andreas Gampec200a4a2014-06-16 18:39:09 -07001491 // Adds space for the cookie. Note: may leave stack unaligned.
Ian Rogers1428dce2014-10-21 15:02:15 -07001492 void LayoutCookie(uint8_t** sp) const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001493 // Reference cookie and padding
1494 *sp -= 8;
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001495 }
1496
Andreas Gampec200a4a2014-06-16 18:39:09 -07001497 // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
1498 // Returns the new bottom. Note: this may be unaligned.
Ian Rogers59c07062014-10-10 13:03:39 -07001499 uint8_t* LayoutJNISaveFrame(Thread* self, StackReference<mirror::ArtMethod>** m, void* sp,
1500 HandleScope** handle_scope)
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001501 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001502 // First, fix up the layout of the callee-save frame.
1503 // We have to squeeze in the HandleScope, and relocate the method pointer.
Ian Rogers59c07062014-10-10 13:03:39 -07001504 LayoutCalleeSaveFrame(self, m, sp, handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001505
1506 // The bottom of the callee-save frame is now where the method is, *m.
1507 uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);
1508
1509 // Add space for cookie.
1510 LayoutCookie(&sp8);
1511
1512 return sp8;
1513 }
1514
1515 // WARNING: After this, *sp won't be pointing to the method anymore!
Ian Rogers59c07062014-10-10 13:03:39 -07001516 uint8_t* ComputeLayout(Thread* self, StackReference<mirror::ArtMethod>** m,
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001517 const char* shorty, uint32_t shorty_len, HandleScope** handle_scope,
Andreas Gampec200a4a2014-06-16 18:39:09 -07001518 uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr)
1519 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1520 Walk(shorty, shorty_len);
1521
1522 // JNI part.
Ian Rogers59c07062014-10-10 13:03:39 -07001523 uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001524
1525 sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);
1526
1527 // Return the new bottom.
1528 return sp8;
1529 }
1530
1531 uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;
1532
1533 // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
1534 void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
1535 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1536
1537 private:
1538 uint32_t num_handle_scope_references_;
1539};
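
// The region that ComputeLayout() carves out below the RefsAndArgs frame looks roughly as follows
// (higher addresses at the top; exact padding depends on kStackAlignment and the architecture, so
// this is a sketch rather than an exact layout):
//
//   | callee-save frame (RefsAndArgs)   |
//   | HandleScope                       |  header + num_handle_scope_references_ slots
//   | StackReference<mirror::ArtMethod> |  <- new *m handed back to the caller
//   | JNI cookie + padding              |  8 bytes, see LayoutCookie()
//   | native out-args                   |  num_stack_entries_ * sizeof(uintptr_t), stack-aligned
//   | FPR scratch area                  |  kNumNativeFprArgs slots
//   | GPR scratch area                  |  kNumNativeGprArgs slots  <- returned bottom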
1540
1541uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
1542 num_handle_scope_references_++;
1543 return reinterpret_cast<uintptr_t>(nullptr);
1544}
1545
1546void ComputeGenericJniFrameSize::WalkHeader(
1547 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
1548 // JNIEnv
1549 sm->AdvancePointer(nullptr);
1550
1551 // Class object or this as first argument
1552 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
1553}
1554
1555// Class to push values to three separate regions. Used to fill the native call part. Adheres to
1556// the template requirements of BuildNativeCallFrameStateMachine.
1557class FillNativeCall {
1558 public:
1559 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
1560 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
1561
1562 virtual ~FillNativeCall() {}
1563
1564 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
1565 cur_gpr_reg_ = gpr_regs;
1566 cur_fpr_reg_ = fpr_regs;
1567 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08001568 }
1569
1570 void PushGpr(uintptr_t val) {
1571 *cur_gpr_reg_ = val;
1572 cur_gpr_reg_++;
1573 }
1574
1575 void PushFpr4(float val) {
1576 *cur_fpr_reg_ = val;
1577 cur_fpr_reg_++;
1578 }
1579
1580 void PushFpr8(uint64_t val) {
1581 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1582 *tmp = val;
1583 cur_fpr_reg_ += 2;
1584 }
1585
1586 void PushStack(uintptr_t val) {
1587 *cur_stack_arg_ = val;
1588 cur_stack_arg_++;
1589 }
1590
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001591 virtual uintptr_t PushHandle(mirror::Object*) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001592 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001593 UNREACHABLE();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001594 }
1595
1596 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001597 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001598 uint32_t* cur_fpr_reg_;
1599 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001600};
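
// For illustration only: filling a prepared frame by hand could combine FillNativeCall with the
// state machine above, as sketched below. The buffer sizes and argument values are made up; in
// the runtime the buffers come from ComputeGenericJniFrameSize::ComputeLayout() and the values
// from the visited managed arguments.
//
//   uintptr_t gprs[8];
//   uint32_t fprs[16];
//   uintptr_t stack_args[16];
//   FillNativeCall filler(gprs, fprs, stack_args);
//   BuildNativeCallFrameStateMachine<FillNativeCall> sm(&filler);
//   sm.AdvanceInt(42);
//   sm.AdvanceLong(UINT64_C(0x0000000100000002));
//   sm.AdvanceDouble(bit_cast<double, uint64_t>(3.5));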
Andreas Gampec147b002014-03-06 18:11:06 -08001601
Andreas Gampec200a4a2014-06-16 18:39:09 -07001602// Visits arguments on the stack placing them into a region lower down the stack for the benefit
1603// of transitioning into native code.
1604class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
1605 public:
Ian Rogers59c07062014-10-10 13:03:39 -07001606 BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
1607 StackReference<mirror::ArtMethod>** sp)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001608 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
1609 jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
1610 ComputeGenericJniFrameSize fsc;
1611 uintptr_t* start_gpr_reg;
1612 uint32_t* start_fpr_reg;
1613 uintptr_t* start_stack_arg;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001614 bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
Ian Rogers59c07062014-10-10 13:03:39 -07001615 &handle_scope_,
1616 &start_stack_arg,
Andreas Gampec200a4a2014-06-16 18:39:09 -07001617 &start_gpr_reg, &start_fpr_reg);
1618
Andreas Gampec200a4a2014-06-16 18:39:09 -07001619 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
1620
1621 // jni environment is always first argument
1622 sm_.AdvancePointer(self->GetJniEnv());
1623
1624 if (is_static) {
1625 sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass());
1626 }
1627 }
1628
1629 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
1630
1631 void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1632
1633 StackReference<mirror::Object>* GetFirstHandleScopeEntry()
1634 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1635 return handle_scope_->GetHandle(0).GetReference();
1636 }
1637
Ian Rogers1428dce2014-10-21 15:02:15 -07001638 jobject GetFirstHandleScopeJObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001639 return handle_scope_->GetHandle(0).ToJObject();
1640 }
1641
Ian Rogers1428dce2014-10-21 15:02:15 -07001642 void* GetBottomOfUsedArea() const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001643 return bottom_of_used_area_;
1644 }
1645
1646 private:
1647 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
1648 class FillJniCall FINAL : public FillNativeCall {
1649 public:
1650 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
1651 HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
1652 handle_scope_(handle_scope), cur_entry_(0) {}
1653
1654 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1655
1656 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
1657 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
1658 handle_scope_ = scope;
1659 cur_entry_ = 0U;
1660 }
1661
1662 void ResetRemainingScopeSlots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1663 // Initialize padding entries.
1664 size_t expected_slots = handle_scope_->NumberOfReferences();
1665 while (cur_entry_ < expected_slots) {
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001666 handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001667 }
1668 DCHECK_NE(cur_entry_, 0U);
1669 }
1670
1671 private:
1672 HandleScope* handle_scope_;
1673 size_t cur_entry_;
1674 };
1675
1676 HandleScope* handle_scope_;
1677 FillJniCall jni_call_;
1678 void* bottom_of_used_area_;
1679
1680 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001681
1682 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1683};
1684
Andreas Gampec200a4a2014-06-16 18:39:09 -07001685uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
1686 uintptr_t tmp;
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001687 MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001688 h.Assign(ref);
1689 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
1690 cur_entry_++;
1691 return tmp;
1692}
1693
Ian Rogers9758f792014-03-13 09:02:55 -07001694void BuildGenericJniFrameVisitor::Visit() {
1695 Primitive::Type type = GetParamPrimitiveType();
1696 switch (type) {
1697 case Primitive::kPrimLong: {
1698 jlong long_arg;
1699 if (IsSplitLongOrDouble()) {
1700 long_arg = ReadSplitLongParam();
1701 } else {
1702 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1703 }
1704 sm_.AdvanceLong(long_arg);
1705 break;
1706 }
1707 case Primitive::kPrimDouble: {
1708 uint64_t double_arg;
1709 if (IsSplitLongOrDouble()) {
1710          // Read as raw bits so that we don't cast to a double.
1711 double_arg = ReadSplitLongParam();
1712 } else {
1713 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1714 }
1715 sm_.AdvanceDouble(double_arg);
1716 break;
1717 }
1718 case Primitive::kPrimNot: {
1719 StackReference<mirror::Object>* stack_ref =
1720 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001721 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07001722 break;
1723 }
1724 case Primitive::kPrimFloat:
1725 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1726 break;
1727 case Primitive::kPrimBoolean: // Fall-through.
1728 case Primitive::kPrimByte: // Fall-through.
1729 case Primitive::kPrimChar: // Fall-through.
1730 case Primitive::kPrimShort: // Fall-through.
1731 case Primitive::kPrimInt: // Fall-through.
1732 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1733 break;
1734 case Primitive::kPrimVoid:
1735 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001736 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -07001737 }
1738}
1739
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001740void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001741 // Clear out rest of the scope.
1742 jni_call_.ResetRemainingScopeSlots();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001743 // Install HandleScope.
1744 self->PushHandleScope(handle_scope_);
Ian Rogers9758f792014-03-13 09:02:55 -07001745}
1746
Ian Rogers04c31d22014-07-07 21:44:06 -07001747#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001748extern "C" void* artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001749#else
1750extern "C" void* artFindNativeMethod(Thread* self);
1751#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001752
Andreas Gampead615172014-04-04 16:20:13 -07001753uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
1754 if (lock != nullptr) {
1755 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
1756 } else {
1757 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
1758 }
1759}
1760
1761void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
1762 if (lock != nullptr) {
1763 JniMethodEndSynchronized(cookie, lock, self);
1764 } else {
1765 JniMethodEnd(cookie, self);
1766 }
1767}
1768
Andreas Gampec147b002014-03-06 18:11:06 -08001769/*
1770 * Initializes an alloca region assumed to be directly below sp for a native call:
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001771 * Create a HandleScope and call stack and fill a mini stack with values to be pushed to registers.
Andreas Gampec147b002014-03-06 18:11:06 -08001772 * The final element on the stack is a pointer to the native code.
1773 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001774 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001775 * We need to fix this, as the handle scope needs to go into the callee-save frame.
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001776 *
Andreas Gampec147b002014-03-06 18:11:06 -08001777 * The two-word return of this function denotes:
1778 * 1) On success: the bottom of the used alloca area (hi) and the native code to invoke (lo).
1779 * 2) On error (pending exception): the failure value from GetTwoWordFailureValue().
1780 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001781extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self,
1782 StackReference<mirror::ArtMethod>* sp)
Andreas Gampe2da88232014-02-27 12:26:20 -08001783 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampecf4035a2014-05-28 22:43:01 -07001784 mirror::ArtMethod* called = sp->AsMirrorPtr();
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001785 DCHECK(called->IsNative()) << PrettyMethod(called, true);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001786 uint32_t shorty_len = 0;
1787 const char* shorty = called->GetShorty(&shorty_len);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001788
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001789 // Run the visitor and update sp.
Ian Rogers59c07062014-10-10 13:03:39 -07001790 BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001791 visitor.VisitArguments();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001792 visitor.FinalizeHandleScope(self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001793
Andreas Gampec200a4a2014-06-16 18:39:09 -07001794 // Fix up managed-stack things in Thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001795 self->SetTopOfStack(sp);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001796
Ian Rogerse0dcd462014-03-08 15:21:04 -08001797 self->VerifyStack();
1798
Andreas Gampe90546832014-03-12 18:07:19 -07001799 // Start JNI, save the cookie.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001800 uint32_t cookie;
1801 if (called->IsSynchronized()) {
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001802 cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001803 if (self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001804 self->PopHandleScope();
Andreas Gampec147b002014-03-06 18:11:06 -08001805 // A negative value denotes an error.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001806 return GetTwoWordFailureValue();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001807 }
1808 } else {
1809 cookie = JniMethodStart(self);
1810 }
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001811 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Ian Rogerse0dcd462014-03-08 15:21:04 -08001812 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001813
Andreas Gampe90546832014-03-12 18:07:19 -07001814 // Retrieve the stored native code.
Mathieu Chartier2d721012014-11-10 11:08:06 -08001815 void* nativeCode = called->GetEntryPointFromJni();
Andreas Gampe90546832014-03-12 18:07:19 -07001816
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07001817 // There are two cases for the content of nativeCode:
1818 // 1) Pointer to the native function.
1819 // 2) Pointer to the trampoline for native code binding.
1820 // In the second case, we need to execute the binding and continue with the actual native function
1821 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07001822 DCHECK(nativeCode != nullptr);
1823 if (nativeCode == GetJniDlsymLookupStub()) {
Ian Rogers04c31d22014-07-07 21:44:06 -07001824#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001825 nativeCode = artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001826#else
1827 nativeCode = artFindNativeMethod(self);
1828#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001829
1830 if (nativeCode == nullptr) {
1831 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07001832
1833 // End JNI, as the assembly will move to deliver the exception.
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001834 jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001835 if (shorty[0] == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001836 artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
1837 } else {
1838 artQuickGenericJniEndJNINonRef(self, cookie, lock);
1839 }
1840
Andreas Gampec200a4a2014-06-16 18:39:09 -07001841 return GetTwoWordFailureValue();
Andreas Gampe90546832014-03-12 18:07:19 -07001842 }
1843 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001844 }
1845
Andreas Gampec200a4a2014-06-16 18:39:09 -07001846 // Return native code addr(lo) and bottom of alloca address(hi).
1847 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
1848 reinterpret_cast<uintptr_t>(nativeCode));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001849}
1850
1851/*
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001852 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001853 * unlocking.
1854 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001855extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001856 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001857 StackReference<mirror::ArtMethod>* sp = self->GetManagedStack()->GetTopQuickFrame();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001858 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Andreas Gampecf4035a2014-05-28 22:43:01 -07001859 mirror::ArtMethod* called = sp->AsMirrorPtr();
Ian Rogerse0dcd462014-03-08 15:21:04 -08001860 uint32_t cookie = *(sp32 - 1);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001861
Andreas Gampead615172014-04-04 16:20:13 -07001862 jobject lock = nullptr;
1863 if (called->IsSynchronized()) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001864 HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
1865 + sizeof(StackReference<mirror::ArtMethod>));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001866 lock = table->GetHandle(0).ToJObject();
Andreas Gampead615172014-04-04 16:20:13 -07001867 }
1868
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001869 char return_shorty_char = called->GetShorty()[0];
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001870
1871 if (return_shorty_char == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001872 return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001873 } else {
Andreas Gampead615172014-04-04 16:20:13 -07001874 artQuickGenericJniEndJNINonRef(self, cookie, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001875
1876 switch (return_shorty_char) {
Nicolas Geoffray54accbc2014-08-13 03:40:45 +01001877 case 'F': {
1878 if (kRuntimeISA == kX86) {
1879 // Convert back the result to float.
1880 double d = bit_cast<uint64_t, double>(result_f);
1881 return bit_cast<float, uint32_t>(static_cast<float>(d));
1882 } else {
1883 return result_f;
1884 }
1885 }
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001886 case 'D':
1887 return result_f;
1888 case 'Z':
1889 return result.z;
1890 case 'B':
1891 return result.b;
1892 case 'C':
1893 return result.c;
1894 case 'S':
1895 return result.s;
1896 case 'I':
1897 return result.i;
1898 case 'J':
1899 return result.j;
1900 case 'V':
1901 return 0;
1902 default:
1903 LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
1904 return 0;
1905 }
1906 }
Andreas Gampe2da88232014-02-27 12:26:20 -08001907}
1908
Andreas Gamped58342c2014-06-05 14:18:08 -07001909// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
1910// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07001911//
Andreas Gamped58342c2014-06-05 14:18:08 -07001912// It is valid to use this, as at the usage points here (returns from C functions) we are assuming
1913// to hold the mutator lock (see SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) annotations).
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001914
1915template<InvokeType type, bool access_check>
Andreas Gamped58342c2014-06-05 14:18:08 -07001916static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
Andreas Gampe51f76352014-05-21 08:28:48 -07001917 mirror::ArtMethod* caller_method,
Andreas Gampecf4035a2014-05-28 22:43:01 -07001918 Thread* self, StackReference<mirror::ArtMethod>* sp);
Andreas Gampe51f76352014-05-21 08:28:48 -07001919
1920template<InvokeType type, bool access_check>
Andreas Gamped58342c2014-06-05 14:18:08 -07001921static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
Andreas Gampe51f76352014-05-21 08:28:48 -07001922 mirror::ArtMethod* caller_method,
Andreas Gampecf4035a2014-05-28 22:43:01 -07001923 Thread* self, StackReference<mirror::ArtMethod>* sp) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001924 ScopedQuickEntrypointChecks sqec(self);
1925 DCHECK_EQ(sp->AsMirrorPtr(), Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001926 mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
1927 type);
1928 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001929 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1930 uint32_t shorty_len;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001931 const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001932 {
1933 // Remember the args in case a GC happens in FindMethodFromCode.
1934 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
1935 RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
1936 visitor.VisitArguments();
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001937 method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method,
1938 self);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001939 visitor.FixupReferences();
1940 }
1941
Ian Rogerse0a02da2014-12-02 14:10:53 -08001942 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001943 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07001944 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001945 }
1946 }
1947 DCHECK(!self->IsExceptionPending());
1948 const void* code = method->GetEntryPointFromQuickCompiledCode();
1949
1950 // When we return, the caller will branch to this address, so it had better not be 0!
Ian Rogerse0a02da2014-12-02 14:10:53 -08001951 DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001952 << " location: "
1953 << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07001954
Andreas Gamped58342c2014-06-05 14:18:08 -07001955 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
1956 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001957}
1958
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001959// Explicit artInvokeCommon template function declarations to please analysis tool.
1960#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
1961 template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) \
Andreas Gamped58342c2014-06-05 14:18:08 -07001962 TwoWordReturn artInvokeCommon<type, access_check>(uint32_t method_idx, \
Andreas Gampe51f76352014-05-21 08:28:48 -07001963 mirror::Object* this_object, \
1964 mirror::ArtMethod* caller_method, \
Andreas Gampecf4035a2014-05-28 22:43:01 -07001965 Thread* self, \
1966 StackReference<mirror::ArtMethod>* sp) \
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001967
1968EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
1969EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
1970EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
1971EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
1972EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
1973EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
1974EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
1975EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
1976EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
1977EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
1978#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
1979
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001980// See comments in runtime_support_asm.S
Andreas Gampec200a4a2014-06-16 18:39:09 -07001981extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
1982 uint32_t method_idx, mirror::Object* this_object,
1983 mirror::ArtMethod* caller_method, Thread* self,
1984 StackReference<mirror::ArtMethod>* sp)
1985 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1986 return artInvokeCommon<kInterface, true>(method_idx, this_object,
1987 caller_method, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001988}
1989
Andreas Gampec200a4a2014-06-16 18:39:09 -07001990extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
1991 uint32_t method_idx, mirror::Object* this_object,
1992 mirror::ArtMethod* caller_method, Thread* self,
1993 StackReference<mirror::ArtMethod>* sp)
1994 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1995 return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method,
1996 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001997}
1998
Andreas Gampec200a4a2014-06-16 18:39:09 -07001999extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
2000 uint32_t method_idx, mirror::Object* this_object,
2001 mirror::ArtMethod* caller_method, Thread* self,
2002 StackReference<mirror::ArtMethod>* sp)
2003 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
2004 return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method,
2005 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002006}
2007
Andreas Gampec200a4a2014-06-16 18:39:09 -07002008extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
2009 uint32_t method_idx, mirror::Object* this_object,
2010 mirror::ArtMethod* caller_method, Thread* self,
2011 StackReference<mirror::ArtMethod>* sp)
2012 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
2013 return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method,
2014 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002015}
2016
Andreas Gampec200a4a2014-06-16 18:39:09 -07002017extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
2018 uint32_t method_idx, mirror::Object* this_object,
2019 mirror::ArtMethod* caller_method, Thread* self,
2020 StackReference<mirror::ArtMethod>* sp)
2021 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
2022 return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method,
2023 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002024}
2025
2026// Determine target of interface dispatch. This object is known non-null.
Andreas Gamped58342c2014-06-05 14:18:08 -07002027extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
Andreas Gampe51f76352014-05-21 08:28:48 -07002028 mirror::Object* this_object,
2029 mirror::ArtMethod* caller_method,
Andreas Gampecf4035a2014-05-28 22:43:01 -07002030 Thread* self,
2031 StackReference<mirror::ArtMethod>* sp)
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002032 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002033 ScopedQuickEntrypointChecks sqec(self);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002034 mirror::ArtMethod* method;
2035 if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
2036 method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
Ian Rogerse0a02da2014-12-02 14:10:53 -08002037 if (UNLIKELY(method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002038 ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
2039 caller_method);
Andreas Gamped58342c2014-06-05 14:18:08 -07002040 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002041 }
2042 } else {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002043 DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
Alexei Zavjalov41c507a2014-05-15 16:02:46 +07002044
2045 // Find the caller PC.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002046 constexpr size_t pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, Runtime::kRefsAndArgs);
Ian Rogers13735952014-10-08 12:43:28 -07002047 uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) + pc_offset);
Alexei Zavjalov41c507a2014-05-15 16:02:46 +07002048
2049 // Map the caller PC to a dex PC.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002050 uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07002051 const DexFile::CodeItem* code = caller_method->GetCodeItem();
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002052 CHECK_LT(dex_pc, code->insns_size_in_code_units_);
2053 const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
2054 Instruction::Code instr_code = instr->Opcode();
2055 CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
2056 instr_code == Instruction::INVOKE_INTERFACE_RANGE)
Ian Rogerse0a02da2014-12-02 14:10:53 -08002057 << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002058 uint32_t dex_method_idx;
2059 if (instr_code == Instruction::INVOKE_INTERFACE) {
2060 dex_method_idx = instr->VRegB_35c();
2061 } else {
2062 DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
2063 dex_method_idx = instr->VRegB_3rc();
2064 }
2065
Andreas Gampec200a4a2014-06-16 18:39:09 -07002066 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()
2067 ->GetDexFile();
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002068 uint32_t shorty_len;
Andreas Gampec200a4a2014-06-16 18:39:09 -07002069 const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
2070 &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002071 {
2072 // Remember the args in case a GC happens in FindMethodFromCode.
2073 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
2074 RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
2075 visitor.VisitArguments();
Mathieu Chartier0cd81352014-05-22 16:48:55 -07002076 method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method,
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002077 self);
2078 visitor.FixupReferences();
2079 }
2080
2081 if (UNLIKELY(method == nullptr)) {
2082 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07002083 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002084 }
2085 }
2086 const void* code = method->GetEntryPointFromQuickCompiledCode();
2087
2088 // When we return, the caller will branch to this address, so it had better not be 0!
Ian Rogerse0a02da2014-12-02 14:10:53 -08002089 DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
Andreas Gampec200a4a2014-06-16 18:39:09 -07002090 << " location: " << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07002091
Andreas Gamped58342c2014-06-05 14:18:08 -07002092 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
2093 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002094}
2095
Ian Rogers848871b2013-08-05 10:56:33 -07002096} // namespace art