/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"

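// A note for readers: the ENTRY/END macros, the register aliases xSELF (thread pointer),
// xFP, xLR and wSUSPEND, and the FRAME_SIZE_* / *_OFFSET constants checked throughout this
// file are expected to be provided by the asm_support headers included above; nothing in
// this file defines them locally.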

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll)
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = & (art::Runtime * art::Runtime.instance_) .

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll] .
    ldr x9, [x9, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #368
    .cfi_adjust_cfa_offset 368

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 368)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP args.
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // FP callee-saves.
    stp d8, d9, [sp, #72]
    stp d10, d11, [sp, #88]
    stp d12, d13, [sp, #104]
    stp d14, d15, [sp, #120]

    stp d16, d17, [sp, #136]
    stp d18, d19, [sp, #152]
    stp d20, d21, [sp, #168]
    stp d22, d23, [sp, #184]
    stp d24, d25, [sp, #200]
    stp d26, d27, [sp, #216]
    stp d28, d29, [sp, #232]
    stp d30, d31, [sp, #248]


    // Callee-saves.
    stp xSELF, x19, [sp, #264]
    .cfi_rel_offset x18, 264
    .cfi_rel_offset x19, 272

    stp x20, x21, [sp, #280]
    .cfi_rel_offset x20, 280
    .cfi_rel_offset x21, 288

    stp x22, x23, [sp, #296]
    .cfi_rel_offset x22, 296
    .cfi_rel_offset x23, 304

    stp x24, x25, [sp, #312]
    .cfi_rel_offset x24, 312
    .cfi_rel_offset x25, 320

    stp x26, x27, [sp, #328]
    .cfi_rel_offset x26, 328
    .cfi_rel_offset x27, 336

    stp x28, xFP, [sp, #344]    // Save FP.
    .cfi_rel_offset x28, 344
    .cfi_rel_offset x29, 352

    str xLR, [sp, #360]
    .cfi_rel_offset x30, 360

    // Store the appropriate callee-save method at the bottom of the frame.
    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].
.endm
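
// For orientation, the 368-byte kSaveAll frame built above is:
//   [sp, #0]     ArtMethod* for the save-all callee-save method
//   [sp, #8]     d0-d31 (FP args d0-d7 at #8-#71, then FP callee-saves and temps)
//   [sp, #264]   x18(xSELF), x19-x28, x29(xFP) at #352, x30(xLR) at #360
// (A summary of the stores above, not an independent specification.)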

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
// WIP.
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = & (art::Runtime * art::Runtime.instance_) .

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly] .
    ldr x9, [x9, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 176)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP callee-saves.
    stp d8, d9, [sp, #8]
    stp d10, d11, [sp, #24]
    stp d12, d13, [sp, #40]
    stp d14, d15, [sp, #56]

    // Callee-saves.
    stp xSELF, x19, [sp, #72]
    .cfi_rel_offset x18, 72
    .cfi_rel_offset x19, 80

    stp x20, x21, [sp, #88]
    .cfi_rel_offset x20, 88
    .cfi_rel_offset x21, 96

    stp x22, x23, [sp, #104]
    .cfi_rel_offset x22, 104
    .cfi_rel_offset x23, 112

    stp x24, x25, [sp, #120]
    .cfi_rel_offset x24, 120
    .cfi_rel_offset x25, 128

    stp x26, x27, [sp, #136]
    .cfi_rel_offset x26, 136
    .cfi_rel_offset x27, 144

    stp x28, xFP, [sp, #152]    // Save FP.
    .cfi_rel_offset x28, 152
    .cfi_rel_offset x29, 160

    str xLR, [sp, #168]
    .cfi_rel_offset x30, 168

    // Store the appropriate callee-save method at the bottom of the frame.
    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly].
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    // FP callee-saves.
    ldp d8, d9, [sp, #8]
    ldp d10, d11, [sp, #24]
    ldp d12, d13, [sp, #40]
    ldp d14, d15, [sp, #56]

    // Callee-saves.
    ldp xSELF, x19, [sp, #72]
    .cfi_restore x18
    .cfi_restore x19

    ldp x20, x21, [sp, #88]
    .cfi_restore x20
    .cfi_restore x21

    ldp x22, x23, [sp, #104]
    .cfi_restore x22
    .cfi_restore x23

    ldp x24, x25, [sp, #120]
    .cfi_restore x24
    .cfi_restore x25

    ldp x26, x27, [sp, #136]
    .cfi_restore x26
    .cfi_restore x27

    ldp x28, xFP, [sp, #152]    // Restore FP.
    .cfi_restore x28
    .cfi_restore x29

    ldr xLR, [sp, #168]
    .cfi_restore x30

    add sp, sp, #176
    .cfi_adjust_cfa_offset -176
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #304
    .cfi_adjust_cfa_offset 304

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 304)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]
    stp d8, d9, [sp, #80]
    stp d10, d11, [sp, #96]
    stp d12, d13, [sp, #112]
    stp d14, d15, [sp, #128]

    stp x1, x2, [sp, #144]
    .cfi_rel_offset x1, 144
    .cfi_rel_offset x2, 152

    stp x3, x4, [sp, #160]
    .cfi_rel_offset x3, 160
    .cfi_rel_offset x4, 168

    stp x5, x6, [sp, #176]
    .cfi_rel_offset x5, 176
    .cfi_rel_offset x6, 184

    stp x7, xSELF, [sp, #192]
    .cfi_rel_offset x7, 192
    .cfi_rel_offset x18, 200

    stp x19, x20, [sp, #208]
    .cfi_rel_offset x19, 208
    .cfi_rel_offset x20, 216

    stp x21, x22, [sp, #224]
    .cfi_rel_offset x21, 224
    .cfi_rel_offset x22, 232

    stp x23, x24, [sp, #240]
    .cfi_rel_offset x23, 240
    .cfi_rel_offset x24, 248

    stp x25, x26, [sp, #256]
    .cfi_rel_offset x25, 256
    .cfi_rel_offset x26, 264

    stp x27, x28, [sp, #272]
    .cfi_rel_offset x27, 272
    .cfi_rel_offset x28, 280

    stp xFP, xLR, [sp, #288]
    .cfi_rel_offset x29, 288
    .cfi_rel_offset x30, 296
.endm
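
// For orientation, the 304-byte kRefsAndArgs frame built above is:
//   [sp, #0]     ArtMethod* (stored separately by the callers of this macro)
//   [sp, #16]    d0-d15 (FP args d0-d7, FP callee-saves d8-d15)
//   [sp, #144]   x1-x7 (args), x18(xSELF) at #200, x19-x28, x29(xFP) at #288, x30(xLR) at #296
// (A summary of the stores above, not an independent specification.)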

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = & (art::Runtime * art::Runtime.instance_) .

    // x9 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs] .
    ldr x9, [x9, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

// As RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME, but leaves d0 untouched so a floating-point
// result in d0 survives the restore.
.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0

    ldr d1, [sp, #24]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f                 // result non-zero branch over
    ret                         // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f                  // result zero branch over
    ret                         // return
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode  // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                       // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, #THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x9
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f                 // result non-zero branch over
    ret                         // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x0, xSELF               // pass Thread::Current
    mov x1, sp                  // pass SP
    b \cxx_name                 // \cxx_name(Thread*, SP)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context.
    mov x1, xSELF               // pass Thread::Current.
    mov x2, sp                  // pass SP.
    b \cxx_name                 // \cxx_name(arg, Thread*, SP).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF               // pass Thread::Current
    mov x3, sp                  // pass SP
    b \cxx_name                 // \cxx_name(arg1, arg2, Thread*, SP)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return NULL/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers x12.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    ldr x2, [sp, #FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE]  // pass caller Method*
    mov x3, xSELF               // pass Thread::Current
    mov x4, sp
    bl \cxx_name                // (method_idx, this, caller, Thread*, SP)
    mov x12, x1                 // save Method*->code_
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                  // did we find the target? if not go to exception delivery
    br x12                      // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=6*8   // x4, x5, x19(wSUSPEND), SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+STACK_REFERENCE_SIZE


    mov x9, sp                  // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD  // calculate size of frame.
    sub x10, sp, x10            // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, # ~0xf        // Enforce 16 byte stack alignment.
    mov sp, x10                 // Set new SP.

    sub x10, x9, #SAVE_SIZE     // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10   // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    stp x9, x19, [x10, #32]     // Save old stack pointer and x19(wSUSPEND)
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]      // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]         // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3               // Move thread pointer into SELF register.
    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL  // reset wSUSPEND to suspend check interval

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #4              // Destination address is bottom of stack + NULL.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4              // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into StackReference<Method>* at bottom of frame.
    str wzr, [sp]

#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(ARM64) size not as expected."
#endif
.endm

.macro INVOKE_STUB_CALL_AND_RETURN

    // load method-> METHOD_QUICK_CODE_OFFSET
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldp x2, x19, [x29, #32]     // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp x29, x30, [x29]         // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | StackRef<ArtMethod>  | <- SP  value=null
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  wSUSPEND = suspend count
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1             // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4            // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction     // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'               // is this a float?
    bne .LisDouble

    cmp x15, # 8*12             // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15           // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'               // is this a double?
    bne .LisLong

    cmp x15, # 8*12             // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15           // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'               // is this a long?
    bne .LisOther

    cmp x8, # 6*12              // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8            // Calculate subroutine to jump to.
    br x17

.LisOther:                      // Everything else takes one vReg.
    cmp x8, # 6*12              // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8            // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
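
// A sketch of how the dispatch above works: every LOADREG expansion below is exactly
// three 4-byte instructions (ldr/add/b), i.e. 12 bytes, so x8/x15 are byte offsets into
// the .Lstore* tables that advance by 12 per register consumed; the "# 6*12" / "# 8*12"
// checks mean "all 6 integer registers (w2-w7) / all 8 FP registers are taken". For
// example, for a method `double f(int a, long b)` the shorty is "DIJ": the leading 'D'
// (the return type) is skipped, 'I' dispatches to .LstoreW2+0 and loads w2, and 'J'
// dispatches to .LstoreX2+12 and loads x3.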

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1             // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction2    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'               // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12             // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15           // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'               // is this a double?
    bne .LisLong2

    cmp x15, # 8*12             // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15           // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'               // is this a long?
    bne .LisOther2

    cmp x8, # 7*12              // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8            // Calculate subroutine to jump to.
    br x17

.LisOther2:                     // Everything else takes one vReg.
    cmp x8, # 7*12              // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8            // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */
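    /*
     * A sketch of the context layout this code assumes: gprs_ holds one uint64_t slot per
     * core register, gprs_[0..30] = x0..x30, with the stack pointer in the slot after x30
     * (it is loaded into x1 below and moved into SP). x0 and x1 themselves are not restored
     * from the context - they are used as scratch and zeroed before the final branch.
     */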

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure.
     */
    .extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // Save callee saves in case exception allocation triggers GC.
    mov x2, xSELF               // Pass Thread::Current.
    mov x3, sp                  // Pass SP.
    bl artHandleFillArrayDataFromCode  // (Array*, const DexFile::Payload*, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_handle_fill_data

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
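    /*
     * A sketch of the thin lock word layout the fast paths below rely on, read off the
     * instructions rather than a separate spec: the top two bits are the lock state
     * (both zero for a thin lock), the low 16 bits hold the owner's thread id (hence
     * the uxth masking), and the bits in between hold the recursion count, incremented
     * in units of #65536 (1 << 16).
     */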
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
    cbz w0, .Lslow_lock
    add x4, x0, #LOCK_WORD_OFFSET  // exclusive load/store has no immediate offset form
.Lretry_lock:
    ldr w2, [xSELF, #THREAD_ID_OFFSET]  // TODO: Can the thread ID really change during the loop?
    ldxr w1, [x4]
    cbnz w1, .Lnot_unlocked     // already thin locked
    stxr w3, w2, [x4]
    cbnz w3, .Lstrex_fail       // store failed, retry
    dmb ishld                   // full (LoadLoad|LoadStore) memory barrier
    ret
.Lstrex_fail:
    b .Lretry_lock              // unlikely forward branch, need to reload and recheck r1/r2
.Lnot_unlocked:
    lsr w3, w1, 30
    cbnz w3, .Lslow_lock        // if either of the top two bits are set, go slow path
    eor w2, w1, w2              // lock_word.ThreadId() ^ self->ThreadId()
    uxth w2, w2                 // zero top 16 bits
    cbnz w2, .Lslow_lock        // thread ids don't match -> contention, go to slow path
    // else the lock word and self thread ids match -> recursive lock, bump the count
    add w2, w1, #65536          // increment count in lock word placing in w2 for storing
    lsr w1, w2, 30              // if either of the top two bits are set, we overflowed.
    cbnz w1, .Lslow_lock        // if we overflow the count go slow path
    str w2, [x0, #LOCK_WORD_OFFSET]  // no need for stxr as we hold the lock
    ret
.Lslow_lock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case we block
    mov x1, xSELF               // pass Thread::Current
    mov x2, sp                  // pass SP
    bl artLockObjectFromCode    // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * x0 holds the possibly null object to unlock.
     *
     * Derived from arm32 code.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz x0, .Lslow_unlock
    ldr w1, [x0, #LOCK_WORD_OFFSET]
    lsr w2, w1, 30
    cbnz w2, .Lslow_unlock      // if either of the top two bits are set, go slow path
    ldr w2, [xSELF, #THREAD_ID_OFFSET]
    eor w3, w1, w2              // lock_word.ThreadId() ^ self->ThreadId()
    uxth w3, w3                 // zero top 16 bits
    cbnz w3, .Lslow_unlock      // if the thread ids don't match, go slow path
    cmp w1, #65536
    bpl .Lrecursive_thin_unlock
    // transition to unlocked, w3 holds 0
    dmb ish                     // full (LoadStore|StoreStore) memory barrier
    str w3, [x0, #LOCK_WORD_OFFSET]
    ret
.Lrecursive_thin_unlock:
    sub w1, w1, #65536
    str w1, [x0, #LOCK_WORD_OFFSET]
    ret
.Lslow_unlock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case exception allocation triggers GC
    mov x1, xSELF               // pass Thread::Current
    mov x2, sp                  // pass SP
    bl artUnlockObjectFromCode  // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register
    sub sp, sp, #32             // Stack needs to be 16B aligned on calls.
    .cfi_adjust_cfa_offset 32
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp xSELF, xLR, [sp, #16]
    .cfi_rel_offset x18, 16
    .cfi_rel_offset x30, 24

    // Call runtime code
    bl artIsAssignableFromCode

    // Check result: zero means the cast is not assignable.
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32
    ret

.Lthrow_class_cast_exception:
    // Restore
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF               // pass Thread::Current
    mov x3, sp                  // pass SP
    b artThrowClassCastException  // (Class*, Class*, Thread*, SP)
    brk 0                       // We should not return here...
END art_quick_check_cast

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * x0 = array, x1 = index, x2 = value
     *
     * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
     * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
     * using index-zero-extension in load/stores.
     *
     * Temporaries: x3, x4
     * TODO: x4 OK? ip seems wrong here.
     */
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst x0, x0
    bne art_quick_aput_obj_with_bound_check
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr w3, [x0, #ARRAY_LENGTH_OFFSET]
    cmp w3, w1
    bhi art_quick_aput_obj
    mov x0, x1
    mov x1, x3
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check

ENTRY art_quick_aput_obj
    cbz x2, .Ldo_aput_null
    ldr w3, [x0, #CLASS_OFFSET]  // Heap reference = 32b
                                 // This also zero-extends to x3
    ldr w4, [x2, #CLASS_OFFSET]  // Heap reference = 32b
                                 // This also zero-extends to x4
    ldr w3, [x3, #CLASS_COMPONENT_TYPE_OFFSET]  // Heap reference = 32b
                                 // This also zero-extends to x3
    cmp w3, w4                   // value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
                                 // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]     // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Ldo_aput_null:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
                                 // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]     // Heap reference = 32b
    ret
.Lcheck_assignability:
    // Store arguments and link register
    sub sp, sp, #48              // Stack needs to be 16B aligned on calls.
    .cfi_adjust_cfa_offset 48
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, xSELF, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x18, 24
    str xLR, [sp, #32]
    .cfi_rel_offset x30, 32

    // Call runtime code
    mov x0, x3                   // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    mov x1, x4                   // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    bl artIsAssignableFromCode

    // Check result: zero means the value is not assignable to the component type.
    cbz x0, .Lthrow_array_store_exception

    // Restore
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
                                 // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]     // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Lthrow_array_store_exception:
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x1, x2                   // Pass value.
    mov x2, xSELF                // Pass Thread::Current.
    mov x3, sp                   // Pass SP.
    b artThrowArrayStoreException  // (Object*, Object*, Thread*, SP).
    brk 0                        // Unreached.
END art_quick_aput_obj
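
// A note on the store tails above (ldr of THREAD_CARD_TABLE_OFFSET, lsr #7, strb): this
// is the GC card-marking write barrier. The object address divided by the card size
// (1 << 7 = 128 bytes, judging by the shift) indexes the card table, and the card byte
// is dirtied so the GC can later find the stored cross-reference. This reading is
// inferred from the code above, not from a separate specification.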

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x2, xSELF               // pass Thread::Current
    mov x3, sp                  // pass SP
    bl \entrypoint              // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, xSELF               // pass Thread::Current
    mov x4, sp                  // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macros that take advantage of code similarities for downcalls with referrer.

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x2, xSELF               // pass Thread::Current
    mov x3, sp                  // pass SP
    bl \entrypoint              // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY
.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x3, xSELF               // pass Thread::Current
    mov x4, sp                  // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY
.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x4, xSELF               // pass Thread::Current
    mov x5, sp                  // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

    /*
     * Entry from managed code when the static storage of a class is uninitialized. This stub will
     * run the class initializer and deliver the exception on error. On success the static storage
     * base is returned.
     */
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO

UNIMPLEMENTED art_quick_initialize_type
UNIMPLEMENTED art_quick_initialize_type_and_verify_access

ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

// This is separated out as the argument order is different.
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, x1                  // Stash the value while the referrer is loaded.
    ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x2, x3                  // Put value param
    mov x3, xSELF               // pass Thread::Current
    mov x4, sp                  // pass SP
    bl artSet64StaticFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_set64_static

    /*
     * Entry from managed code to resolve a string, this stub will allocate a String and deliver an
     * exception on error. On success the String is returned. x0 holds the referring method,
     * w1 holds the string index. The fast path check for hit in strings cache has already been
     * performed.
     */
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

    /*
     * Called by managed code when the value in wSUSPEND has been decremented to 0.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
    ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET]  // get xSELF->state_and_flags.as_struct.flags
    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL  // reset wSUSPEND to SUSPEND_CHECK_INTERVAL
    cbnz w0, .Lneed_suspend     // check flags == 0
    ret                         // return if flags == 0
.Lneed_suspend:
    mov x0, xSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves for stack crawl
    mov x1, sp
    bl artTestSuspendFromCode   // (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver. The frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]            // place proxy method at bottom of frame
    mov x2, xSELF               // pass Thread::Current
    mov x3, sp                  // pass SP
    bl artQuickProxyInvokeHandler  // (Method* proxy method, receiver, Thread*, SP)
    ldr xSELF, [sp, #200]       // Restore self pointer.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy  // branch if an exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0  // keep d0
    ret                         // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict. x12 is a hidden argument that holds the target method's
     * dex method index.
     */
ENTRY art_quick_imt_conflict_trampoline
    ldr w0, [sp, #0]            // load caller Method*
    ldr w0, [x0, #METHOD_DEX_CACHE_METHODS_OFFSET]  // load dex_cache_resolved_methods
    add x0, x0, #OBJECT_ARRAY_DATA_OFFSET  // get starting address of data
    ldr w0, [x0, x12, lsl 2]    // load the target method
    b art_quick_invoke_interface_trampoline
END art_quick_imt_conflict_trampoline

ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline  // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov x9, x0                  // Remember returned code pointer in x9.
    ldr w0, [sp, #0]            // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    br x9
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D15               |    callee save
 * | D14               |    callee save
 * | D13               |    callee save
 * | D12               |    callee save
 * | D11               |    callee save
 * | D10               |    callee save
 * | D9                |    callee save
 * | D8                |    callee save
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | Method*           | <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | handle scope size | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------# <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- SP
 * #-------------------#
 */
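/*
 * Rough control flow of the trampoline below, as a reading of the code rather than a
 * separate spec: artQuickGenericJniTrampoline (C++) builds the handle scope and the
 * native call stack inside the large scratch area reserved below, and returns how much
 * of that area holds outgoing register values; the assembly then pops those into
 * x0-x7/d0-d7, calls the native code pointer, and hands the raw result to
 * artQuickGenericJniEndTrampoline for sign extension and JNI cleanup.
 */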
    /*
     * Called to do a generic JNI down-call
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]            // Store native ArtMethod* to bottom of stack.

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the handle scope is inserted.
    mov xFP, sp

    mov x8, #5120
    sub sp, sp, x8

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*,  SP)
    //    x0      x1   <= C calling convention
    //  xSELF    xFP   <= where they are

    mov x0, xSELF               // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline  // (Thread*, sp)

    // Get the updated pointer. This is the bottom of the frame _with_ handle scope.
    ldr xFP, [sp]
    add x9, sp, #8

    cmp x0, #0
    b.mi .Lentry_error          // Check for error, negative value.

    // release part of the alloca.
    add x9, x9, x0

    // Get the code pointer
    ldr xIP0, [x9, #0]

    // Load parameters from frame into registers.
    // TODO Check with artQuickGenericJniTrampoline.
    // Also, check again AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [x9, #8]
    ldp x2, x3, [x9, #24]
    ldp x4, x5, [x9, #40]
    ldp x6, x7, [x9, #56]

    ldp d0, d1, [x9, #72]
    ldp d2, d3, [x9, #88]
    ldp d4, d5, [x9, #104]
    ldp d6, d7, [x9, #120]

    add sp, x9, #136

    blr xIP0                    // native call.

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*,  SP, result, result_f)
    //   x0       x1   x2       x3    <= C calling convention
    mov x5, x0                  // Save return value
    mov x0, xSELF               // Thread register
    mov x1, xFP                 // Stack pointer
    mov x2, x5                  // Result (from saved)
    fmov x3, d0                 // d0 will contain floating point result, but needs to go into x3

    bl artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // Pending exceptions possible.
    ldr x1, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x1, .Lexception_in_native

    // Tear down the callee-save frame.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    // store into fpr, for when it's a fpr return...
    fmov d0, x0
    ret

.Lentry_error:
    mov sp, x28
    .cfi_def_cfa_register sp
    ldr xSELF, [x28, #200]
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline

/*
 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
 * of a quick call:
 * x0 = method being called/to bridge to.
 * x1..x7, d0..d7 = arguments to that method.
 */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // Set up frame and save arguments.

    // x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF               // How to get Thread::Current() ???
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.

    fmov d0, x0

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

UNIMPLEMENTED art_quick_instrumentation_entry
UNIMPLEMENTED art_quick_instrumentation_exit
UNIMPLEMENTED art_quick_deoptimize
UNIMPLEMENTED art_quick_indexof

    /*
     * String's compareTo.
     *
     * TODO: Not very optimized.
     *
     * On entry:
     *    x0:   this object pointer
     *    x1:   comp object pointer
     *
     */
    .extern __memcmp16
ENTRY art_quick_string_compareto
    mov x2, x0                  // x0 is return, use x2 for first input.
    sub x0, x2, x1              // Same string object?
    cbnz x0, 1f
    ret
1:                              // Different string objects.

    ldr w6, [x2, #STRING_OFFSET_OFFSET]
    ldr w5, [x1, #STRING_OFFSET_OFFSET]
    ldr w4, [x2, #STRING_COUNT_OFFSET]
    ldr w3, [x1, #STRING_COUNT_OFFSET]
    ldr w2, [x2, #STRING_VALUE_OFFSET]
    ldr w1, [x1, #STRING_VALUE_OFFSET]

    /*
     * Now:           CharArray*    Offset   Count
     *    first arg       x2          w6       w4
     *   second arg       x1          w5       w3
     */

    // x0 := str1.length(w4) - str2.length(w3). ldr zero-extended w3/w4 into x3/x4.
    subs x0, x4, x3
    // Min(count1, count2) into x3.
    csel x3, x3, x4, ge

    // Build pointer into string data.

    // Add offset in array (substr etc.) (sign extend and << 1).
    add x2, x2, w6, sxtw #1
    add x1, x1, w5, sxtw #1

    // Add offset in CharArray to array.
    add x2, x2, #STRING_DATA_OFFSET
    add x1, x1, #STRING_DATA_OFFSET

    // Check for long string, do memcmp16 for them.
    cmp w3, #28                 // Constant from arm32.
    bgt .Ldo_memcmp16

    /*
     * Now:
     *   x2: *first string data
     *   x1: *second string data
     *   w3: iteration count
     *   x0: return value if comparison equal
     *   x4, x5, x6, x7: free
     */

    // Do a simple unrolled loop.
.Lloop:
    // At least two more elements?
    subs w3, w3, #2
    b.lt .Lremainder_or_done

    ldrh w4, [x2], #2
    ldrh w5, [x1], #2

    ldrh w6, [x2], #2
    ldrh w7, [x1], #2

    subs w4, w4, w5
    b.ne .Lw4_result

    subs w6, w6, w7
    b.ne .Lw6_result

    b .Lloop

.Lremainder_or_done:
    adds w3, w3, #1
    b.eq .Lremainder
    ret

.Lremainder:
    ldrh w4, [x2], #2
    ldrh w5, [x1], #2
    subs w4, w4, w5
    b.ne .Lw4_result
    ret

// Result is in w4
.Lw4_result:
    sxtw x0, w4
    ret

// Result is in w6
.Lw6_result:
    sxtw x0, w6
    ret

.Ldo_memcmp16:
    str x0, [sp, #-16]!         // Save x0
    
    mov x0, x2
    uxtw x2, w3
    bl __memcmp16

    ldr x1, [sp], #16           // Restore old x0 = length diff

    cmp x0, #0                  // Check the memcmp difference
    csel x0, x0, x1, ne         // x0 := x0 != 0 ? x0 : x1
    ret
END art_quick_string_compareto