/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime* Runtime::instance_.

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kSaveAll].
    ldr x9, [x9, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #368
    .cfi_adjust_cfa_offset 368

    // FP args
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // FP callee-saves
    stp d8, d9, [sp, #72]
    stp d10, d11, [sp, #88]
    stp d12, d13, [sp, #104]
    stp d14, d15, [sp, #120]

    stp d16, d17, [sp, #136]
    stp d18, d19, [sp, #152]
    stp d20, d21, [sp, #168]
    stp d22, d23, [sp, #184]
    stp d24, d25, [sp, #200]
    stp d26, d27, [sp, #216]
    stp d28, d29, [sp, #232]
    stp d30, d31, [sp, #248]


    // Callee saved.
    stp xSELF, x19, [sp, #264]
    .cfi_rel_offset x18, 264
    .cfi_rel_offset x19, 272

    stp x20, x21, [sp, #280]
    .cfi_rel_offset x20, 280
    .cfi_rel_offset x21, 288

    stp x22, x23, [sp, #296]
    .cfi_rel_offset x22, 296
    .cfi_rel_offset x23, 304

    stp x24, x25, [sp, #312]
    .cfi_rel_offset x24, 312
    .cfi_rel_offset x25, 320

    stp x26, x27, [sp, #328]
    .cfi_rel_offset x26, 328
    .cfi_rel_offset x27, 336

    stp x28, xFP, [sp, #344]    // Save FP.
    .cfi_rel_offset x28, 344
    .cfi_rel_offset x29, 352

    str xLR, [sp, #360]
    .cfi_rel_offset x30, 360

    // Store the SaveAll callee-save method at the bottom of the frame.
    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].

.endm
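
    // Frame layout produced by SETUP_SAVE_ALL_CALLEE_SAVE_FRAME (offsets from SP, summarized
    // from the stores above; 368 bytes total):
    //   [sp, #0]           ArtMethod* (kSaveAll callee-save method)
    //   [sp, #8]  - #64    FP args d0-d7
    //   [sp, #72] - #256   FP registers d8-d31
    //   [sp, #264]- #352   x18 (xSELF), x19-x28, x29 (FP)
    //   [sp, #360]         x30 (LR)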

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    brk 0
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    brk 0
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    brk 0
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #304
    .cfi_adjust_cfa_offset 304

    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]
    stp d8, d9, [sp, #80]
    stp d10, d11, [sp, #96]
    stp d12, d13, [sp, #112]
    stp d14, d15, [sp, #128]

    stp x1, x2, [sp, #144]
    .cfi_rel_offset x1, 144
    .cfi_rel_offset x2, 152

    stp x3, x4, [sp, #160]
    .cfi_rel_offset x3, 160
    .cfi_rel_offset x4, 168

    stp x5, x6, [sp, #176]
    .cfi_rel_offset x5, 176
    .cfi_rel_offset x6, 184

    stp x7, xSELF, [sp, #192]
    .cfi_rel_offset x7, 192
    .cfi_rel_offset x18, 200

    stp x19, x20, [sp, #208]
    .cfi_rel_offset x19, 208
    .cfi_rel_offset x20, 216

    stp x21, x22, [sp, #224]
    .cfi_rel_offset x21, 224
    .cfi_rel_offset x22, 232

    stp x23, x24, [sp, #240]
    .cfi_rel_offset x23, 240
    .cfi_rel_offset x24, 248

    stp x25, x26, [sp, #256]
    .cfi_rel_offset x25, 256
    .cfi_rel_offset x26, 264

    stp x27, x28, [sp, #272]
    .cfi_rel_offset x27, 272
    .cfi_rel_offset x28, 280

    stp xFP, xLR, [sp, #288]
    .cfi_rel_offset x29, 288
    .cfi_rel_offset x30, 296
.endm
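
    // Frame layout produced by SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL (offsets from
    // SP, summarized from the stores above; 304 bytes total):
    //   [sp, #0]           ArtMethod* (stored by the caller of this macro)
    //   [sp, #16] - #136   FP args d0-d7 and FP callee-saves d8-d15
    //   [sp, #144]- #192   integer args x1-x7
    //   [sp, #200]         x18 (xSELF)
    //   [sp, #208]- #280   callee-saves x19-x28
    //   [sp, #288], #296   x29 (FP), x30 (LR)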

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime* Runtime::instance_.

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kRefsAndArgs].
    ldr x9, [x9, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0

    ldr d1, [sp, #24]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    brk 0
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    brk 0
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode    // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                                   // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    ldr x9, [xSELF, #THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz x9, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    brk 0
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * TODO arm64 specifics need to be fleshed out.
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading x0 with the target Method*, x0 will contain
     * the method_idx. This wrapper will save x1-x3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in x1.
     *
     * The helper will attempt to locate the target and return a two-word result:
     * the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return NULL/NULL. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    brk 0
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=5*8                             // x4, x5, SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8

    mov x9, sp                            // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD    // Calculate size of frame.
    sub x10, sp, x10                      // Calculate SP position - saves + ArtMethod* + args.
    and x10, x10, #~0xf                   // Enforce 16 byte stack alignment.
    mov sp, x10                           // Set new SP.

    sub x10, x9, #SAVE_SIZE               // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10             // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x9, [x10, #32]                    // Save old stack pointer.
    .cfi_rel_offset sp, 32

    stp x4, x5, [x10, #16]                // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                   // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                          // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                         // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                        // Destination address is bottom of stack + 8
                                          // (skip the null ArtMethod* slot).

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4                        // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into Method* at bottom of frame.
    str xzr, [sp]

.endm
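
    // Example (illustrative): for a virtual method taking (int, long), the args array holds
    // four 4-byte vreg slots - "this", the int, and the two halves of the long - so w2 is 16
    // and the loop above copies them, 4 bytes at a time and highest offset first, to [sp, #8].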

.macro INVOKE_STUB_CALL_AND_RETURN

    // Load method->quick_code_ (METHOD_QUICK_CODE_OFFSET).
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldr x2, [x29, #32]   // Restore stack pointer.
    mov sp, x2
    .cfi_restore sp

    ldp x29, x30, [x29]  // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |          Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |          Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod* NULL      | <- SP
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1           // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4          // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1       // Load next character in signature, and increment.
    cbz w17, .LcallFunction   // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'             // is this a float?
    bne .LisDouble

    cmp x15, #8*12            // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15         // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'             // is this a double?
    bne .LisLong

    cmp x15, #8*12            // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15         // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'             // is this a long?
    bne .LisOther

    cmp x8, #6*12             // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8          // Calculate subroutine to jump to.
    br x17

.LisOther:                    // Everything else takes one vReg.
    cmp x8, #6*12             // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8          // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
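
// Each LOADREG expansion is three 4-byte instructions (12 bytes), so the counters x8 and x15
// double as byte offsets into the .Lstore* tables below: advancing a counter by 12 selects
// the entry for the next register, and the comparisons against 6*12 (w2-w7 here, since w1
// already holds "this") and 8*12 (s/d0-s/d7) above check whether every available argument
// register has already been filled.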

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub
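
// Worked example (illustrative): for a virtual method with shorty "VIJF" (void return; int,
// long and float parameters), the stub above loads w1 with "this" before the loop, then the
// shorty walk loads w2 with the int, x3 with the long and s0 with the float, and
// .LcallFunction branches to the method's quick code with those registers set.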

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1           // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1       // Load next character in signature, and increment.
    cbz w17, .LcallFunction2  // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'             // is this a float?
    bne .LisDouble2

    cmp x15, #8*12            // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15         // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'             // is this a double?
    bne .LisLong2

    cmp x15, #8*12            // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15         // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'             // is this a long?
    bne .LisOther2

    cmp x8, #7*12             // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8          // Calculate subroutine to jump to.
    br x17

.LisOther2:                   // Everything else takes one vReg.
    cmp x8, #7*12             // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8          // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */
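    /*
     * The loads below expect fprs_ to hold d0-d31 in order, and gprs_ to hold x0-x30 followed
     * by one more slot whose value is moved into SP; x30/LR also supplies the target PC for
     * the final branch.
     */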

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

UNIMPLEMENTED art_quick_handle_fill_data

UNIMPLEMENTED art_quick_lock_object
UNIMPLEMENTED art_quick_unlock_object
UNIMPLEMENTED art_quick_check_cast
UNIMPLEMENTED art_quick_aput_obj_with_null_and_bound_check
UNIMPLEMENTED art_quick_aput_obj_with_bound_check
UNIMPLEMENTED art_quick_aput_obj
UNIMPLEMENTED art_quick_initialize_static_storage
UNIMPLEMENTED art_quick_initialize_type
UNIMPLEMENTED art_quick_initialize_type_and_verify_access
UNIMPLEMENTED art_quick_get32_static
UNIMPLEMENTED art_quick_get64_static
UNIMPLEMENTED art_quick_get_obj_static
UNIMPLEMENTED art_quick_get32_instance
UNIMPLEMENTED art_quick_get64_instance
UNIMPLEMENTED art_quick_get_obj_instance
UNIMPLEMENTED art_quick_set32_static
UNIMPLEMENTED art_quick_set64_static
UNIMPLEMENTED art_quick_set_obj_static
UNIMPLEMENTED art_quick_set32_instance
UNIMPLEMENTED art_quick_set64_instance
UNIMPLEMENTED art_quick_set_obj_instance
UNIMPLEMENTED art_quick_resolve_string

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    brk 0
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    brk 0
END \name
.endm

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

UNIMPLEMENTED art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver. The frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]                      // place proxy method at bottom of frame
    mov x2, xSELF                         // pass Thread::Current
    mov x3, sp                            // pass SP
    bl artQuickProxyInvokeHandler         // (Method* proxy method, receiver, Thread*, SP)
    ldr xSELF, [sp, #200]                 // Restore self pointer.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy         // branch if an exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0  // keep d0
    ret                                   // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

UNIMPLEMENTED art_quick_imt_conflict_trampoline


ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x19, x0                      // save the called method
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline  // (called, receiver, Thread*, SP)
    mov x9, x0                       // Remember returned code pointer in x9.
    mov x0, x19                      // Restore the method, before x19 is restored to on-call value
    cbz x9, 1f                       // Resolution failed: deliver the pending exception.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    br x9
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D15               |    callee save
 * | D14               |    callee save
 * | D13               |    callee save
 * | D12               |    callee save
 * | D11               |    callee save
 * | D10               |    callee save
 * | D9                |    callee save
 * | D8                |    callee save
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | X0/Method*        |  <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | SIRT size         | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------#    <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call.
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]        // Store native ArtMethod* to bottom of stack.

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the SIRT is inserted.
    mov xFP, sp

    mov x8, #5120
    sub sp, sp, x8

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*,  SP)
    //    x0      x1   <= C calling convention
    //  xSELF    xFP   <= where they are

    mov x0, xSELF           // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline  // (Thread*, sp)

    // Get the updated pointer. This is the bottom of the frame _with_ SIRT.
    ldr xFP, [sp]
    add x9, sp, #8

    cmp x0, #0
    b.mi .Lentry_error      // Check for error, negative value.

    // release part of the alloca.
    add x9, x9, x0

    // Get the code pointer
    ldr xIP0, [x9, #0]

    // Load parameters from frame into registers.
    // TODO Check with artQuickGenericJniTrampoline.
    // Also, check again AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [x9, #8]
    ldp x2, x3, [x9, #24]
    ldp x4, x5, [x9, #40]
    ldp x6, x7, [x9, #56]

    ldp d0, d1, [x9, #72]
    ldp d2, d3, [x9, #88]
    ldp d4, d5, [x9, #104]
    ldp d6, d7, [x9, #120]

    add sp, x9, #136

    blr xIP0                // native call.

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*,  SP, result, result_f)
    //    x0      x1    x2       x3    <= C calling convention
    mov x5, x0              // Save return value
    mov x0, xSELF           // Thread register
    mov x1, xFP             // Stack pointer
    mov x2, x5              // Result (from saved)
    fmov x3, d0             // d0 will contain floating point result, but needs to go into x3

    bl artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // Pending exceptions possible.
    ldr x1, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x1, .Lexception_in_native

    // Tear down the callee-save frame.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    // Store into FPR as well, for when it's an FPR return...
    fmov d0, x0
    ret

.Lentry_error:
    mov sp, x28
    .cfi_def_cfa_register sp
    ldr xSELF, [x28, #200]
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline

/*
 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
 * of a quick call:
 * x0 = method being called/to bridge to.
 * x1..x7, d0..d7 = arguments to that method.
 */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME    // Set up frame and save arguments.

    //  x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF                           // How to get Thread::Current() ???
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.

    fmov d0, x0

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

UNIMPLEMENTED art_quick_instrumentation_entry
UNIMPLEMENTED art_quick_instrumentation_exit
UNIMPLEMENTED art_quick_deoptimize
UNIMPLEMENTED art_quick_mul_long
UNIMPLEMENTED art_quick_shl_long
UNIMPLEMENTED art_quick_shr_long
UNIMPLEMENTED art_quick_ushr_long
UNIMPLEMENTED art_quick_indexof
UNIMPLEMENTED art_quick_string_compareto