/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime* (Runtime::instance_).

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kSaveAll].
    ldr x9, [x9, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #368
    .cfi_adjust_cfa_offset 368

    // FP args.
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // FP callee-saves.
    stp d8, d9, [sp, #72]
    stp d10, d11, [sp, #88]
    stp d12, d13, [sp, #104]
    stp d14, d15, [sp, #120]

    stp d16, d17, [sp, #136]
    stp d18, d19, [sp, #152]
    stp d20, d21, [sp, #168]
    stp d22, d23, [sp, #184]
    stp d24, d25, [sp, #200]
    stp d26, d27, [sp, #216]
    stp d28, d29, [sp, #232]
    stp d30, d31, [sp, #248]

    // Callee-saves.
    stp xSELF, x19, [sp, #264]
    .cfi_rel_offset x18, 264
    .cfi_rel_offset x19, 272

    stp x20, x21, [sp, #280]
    .cfi_rel_offset x20, 280
    .cfi_rel_offset x21, 288

    stp x22, x23, [sp, #296]
    .cfi_rel_offset x22, 296
    .cfi_rel_offset x23, 304

    stp x24, x25, [sp, #312]
    .cfi_rel_offset x24, 312
    .cfi_rel_offset x25, 320

    stp x26, x27, [sp, #328]
    .cfi_rel_offset x26, 328
    .cfi_rel_offset x27, 336

    stp x28, xFP, [sp, #344]    // Save FP.
    .cfi_rel_offset x28, 344
    .cfi_rel_offset x29, 352

    str xLR, [sp, #360]
    .cfi_rel_offset x30, 360

    // Store the save-all callee-save method at the bottom of the frame.
    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].
.endm
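
// Resulting frame layout (368 bytes, derived from the stores above):
//   [sp, #0]          ArtMethod* (the kSaveAll callee-save method)
//   [sp, #8..#71]     d0-d7   (FP args)
//   [sp, #72..#135]   d8-d15  (FP callee-saves)
//   [sp, #136..#263]  d16-d31
//   [sp, #264..#359]  x18 (xSELF), x19-x28, x29 (xFP)
//   [sp, #360]        x30 (xLR)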

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    brk 0
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    brk 0
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    brk 0
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #304
    .cfi_adjust_cfa_offset 304

    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]
    stp d8, d9, [sp, #80]
    stp d10, d11, [sp, #96]
    stp d12, d13, [sp, #112]
    stp d14, d15, [sp, #128]

    stp x1, x2, [sp, #144]
    .cfi_rel_offset x1, 144
    .cfi_rel_offset x2, 152

    stp x3, x4, [sp, #160]
    .cfi_rel_offset x3, 160
    .cfi_rel_offset x4, 168

    stp x5, x6, [sp, #176]
    .cfi_rel_offset x5, 176
    .cfi_rel_offset x6, 184

    stp x7, xSELF, [sp, #192]
    .cfi_rel_offset x7, 192
    .cfi_rel_offset x18, 200

    stp x19, x20, [sp, #208]
    .cfi_rel_offset x19, 208
    .cfi_rel_offset x20, 216

    stp x21, x22, [sp, #224]
    .cfi_rel_offset x21, 224
    .cfi_rel_offset x22, 232

    stp x23, x24, [sp, #240]
    .cfi_rel_offset x23, 240
    .cfi_rel_offset x24, 248

    stp x25, x26, [sp, #256]
    .cfi_rel_offset x25, 256
    .cfi_rel_offset x26, 264

    stp x27, x28, [sp, #272]
    .cfi_rel_offset x27, 272
    .cfi_rel_offset x28, 280

    stp xFP, xLR, [sp, #288]
    .cfi_rel_offset x29, 288
    .cfi_rel_offset x30, 296
.endm
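
// Resulting frame layout (304 bytes, derived from the stores above; #8 is
// padding so the d-register pairs stay 16-byte aligned):
//   [sp, #0]          ArtMethod* (stored by the caller of this macro)
//   [sp, #16..#143]   d0-d7 (FP args) and d8-d15 (FP callee-saves)
//   [sp, #144..#207]  x1-x7 (integer args) and x18 (xSELF)
//   [sp, #208..#287]  x19-x28 (callee-saves)
//   [sp, #288..#303]  x29 (xFP) and x30 (xLR)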

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                // x9 = art::Runtime* (Runtime::instance_).

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kRefsAndArgs].
    ldr x9, [x9, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str x9, [sp]                // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0

    ldr d1, [sp, #24]           // Skip d0: it may hold a floating point return value.
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    brk 0
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    brk 0
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode    // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                                   // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    ldr x9, [xSELF, #THREAD_EXCEPTION_OFFSET]   // Get exception field.
    cbnz x9, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm
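
// Used at the end of a runtime downcall whose failure is signalled by a
// pending exception on the current thread (see art_quick_to_interpreter_bridge
// below for a use): falls through to the `ret` on success, otherwise delivers
// the exception.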

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    brk 0
END \c_name
.endm

// FIXME: Temporary fix for TR(XSELF).
.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context.
    mov x1, x19                         // pass Thread::Current.
    mov x2, sp                          // pass SP.
    b \cxx_name                         // \cxx_name(arg in x0, Thread*, SP).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * the index, arg2 holds the limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * TODO arm64 specifics need to be fleshed out.
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading x0 with the target Method*, x0 will contain
     * the method_idx. This wrapper will save x1-x3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in x1.
     *
     * The helper will attempt to locate the target and return a two-word result:
     * the target Method* in x0 and the method's code_ entry point in x1.
     *
     * If unsuccessful, the helper will return NULL/NULL. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    brk 0
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=5*8               // x4, x5, SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8

    mov x9, sp                          // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD  // Calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position: saves + ArtMethod* + args.
    and x10, x10, #~0xf                 // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x9, [x10, #32]                  // Save old stack pointer.
    .cfi_rel_offset sp, 32

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                      // Destination address is bottom of stack plus the slot
                                        // reserved for the null ArtMethod*.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4                      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into Method* at bottom of frame.
    str xzr, [sp]

.endm

.macro INVOKE_STUB_CALL_AND_RETURN

    // Load the method's quick code entry point and branch to it.
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]                      // First shorty character is the return type.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldr x2, [x29, #32]                  // Restore stack pointer.
    mov sp, x2
    .cfi_restore sp

    ldp x29, x30, [x29]                 // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm
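
// The shorty in x5 is the method signature in compact form: the first
// character is the return type and each following character is one argument
// type ('V' void, 'Z' boolean, 'B' byte, 'C' char, 'S' short, 'I' int,
// 'J' long, 'F' float, 'D' double, 'L' reference). For example, a method
// `long f(int, double)` has the shorty "JID".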


/*
 * extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                      uint32_t  *args,     x1
 *                                      uint32_t argsize,    w2
 *                                      Thread *self,        x3
 *                                      JValue *result,      x4
 *                                      char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod* NULL      | <- SP
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
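
// For orientation, the register-filling loop below behaves like this C sketch
// (illustrative only - load_gpr*/load_fpr* are hypothetical helpers standing
// in for the LOADREG jump tables further down):
//
//   void fill_registers(uint32_t* args, const char* shorty) {
//     int gprs = 0, fprs = 0;            // Offsets into the jump tables.
//     load_gpr32(/*w1*/, *args++);       // "this" always goes into w1.
//     for (const char* p = shorty + 1; *p != '\0'; ++p) {  // Skip return type.
//       switch (*p) {
//         case 'D': if (fprs < 8) load_fpr64(fprs++, args); args += 2; break;
//         case 'F': if (fprs < 8) load_fpr32(fprs++, args); args += 1; break;
//         case 'J': if (gprs < 6) load_gpr64(gprs++, args); args += 2; break;
//         default:  if (gprs < 6) load_gpr32(gprs++, args); args += 1; break;
//       }
//     }
//   }
//
// Args that no longer fit in registers are simply skipped over; they were
// already copied into the out area by INVOKE_STUB_CREATE_FRAME.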
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1         // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4        // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1     // Load next character in signature, and increment.
    cbz w17, .LcallFunction // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'           // is this a float?
    bne .LisDouble

    cmp x15, #(8*12)        // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15       // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'           // is this a double?
    bne .LisLong

    cmp x15, #(8*12)        // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15       // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'           // is this a long?
    bne .LisOther

    cmp x8, #(6*12)         // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8        // Calculate subroutine to jump to.
    br x17

.LisOther:                  // Everything else takes one vReg.
    cmp x8, #(6*12)         // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8        // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables.
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
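
// Each LOADREG expansion is a fixed-size, three-instruction (12-byte) jump
// table entry, and \counter advances by 12 so the next argument of the same
// kind lands on the next entry. For example, `LOADREG x8 4 w2 .LfillRegisters`
// expands to:
//     ldr w2, [x9], #4    // Pop a 4-byte arg from the frame into w2.
//     add x8, x8, 12      // The next int arg will use the next entry (w3).
//     b .LfillRegisters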

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/* extern"C"
 * void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                   uint32_t  *args,     x1
 *                                   uint32_t argsize,    w2
 *                                   Thread *self,        x3
 *                                   JValue *result,      x4
 *                                   char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1         // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1     // Load next character in signature, and increment.
    cbz w17, .LcallFunction2    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'           // is this a float?
    bne .LisDouble2

    cmp x15, #(8*12)        // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15       // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'           // is this a double?
    bne .LisLong2

    cmp x15, #(8*12)        // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15       // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'           // is this a long?
    bne .LisOther2

    cmp x8, #(7*12)         // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8        // Calculate subroutine to jump to.
    br x17

.LisOther2:                 // Everything else takes one vReg.
    cmp x8, #(7*12)         // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8        // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */
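
// Register-context layout implied by the loads below: fprs_[0..31] hold d0-d31
// in order; gprs_[2..30] hold x2-x30 in order and gprs_[31] holds the new SP.
// x0 and x1 are not taken from the context - they are zeroed before the jump -
// and control transfers to whatever the context placed in x30 (LR).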
ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

UNIMPLEMENTED art_quick_handle_fill_data

UNIMPLEMENTED art_quick_lock_object
UNIMPLEMENTED art_quick_unlock_object
UNIMPLEMENTED art_quick_check_cast
UNIMPLEMENTED art_quick_aput_obj_with_null_and_bound_check
UNIMPLEMENTED art_quick_aput_obj_with_bound_check
UNIMPLEMENTED art_quick_aput_obj
UNIMPLEMENTED art_quick_initialize_static_storage
UNIMPLEMENTED art_quick_initialize_type
UNIMPLEMENTED art_quick_initialize_type_and_verify_access
UNIMPLEMENTED art_quick_get32_static
UNIMPLEMENTED art_quick_get64_static
UNIMPLEMENTED art_quick_get_obj_static
UNIMPLEMENTED art_quick_get32_instance
UNIMPLEMENTED art_quick_get64_instance
UNIMPLEMENTED art_quick_get_obj_instance
UNIMPLEMENTED art_quick_set32_static
UNIMPLEMENTED art_quick_set64_static
UNIMPLEMENTED art_quick_set_obj_static
UNIMPLEMENTED art_quick_set32_instance
UNIMPLEMENTED art_quick_set64_instance
UNIMPLEMENTED art_quick_set_obj_instance
UNIMPLEMENTED art_quick_resolve_string

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    brk 0
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    brk 0
END \name
.endm

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

UNIMPLEMENTED art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver; the frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]                // place proxy method at bottom of frame
    mov x2, xSELF                   // pass Thread::Current
    mov x3, sp                      // pass SP
    bl artQuickProxyInvokeHandler   // (Method* proxy method, receiver, Thread*, SP)
    ldr xSELF, [sp, #200]           // Restore self pointer.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy   // deliver if an exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0    // keep d0: it holds the return value
    ret                             // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

UNIMPLEMENTED art_quick_imt_conflict_trampoline


ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x19, x0             // save the called method
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
    mov x9, x0              // Remember returned code pointer in x9.
    mov x0, x19             // Restore the method, before x19 is restored to its on-call value.
    cbz x9, 1f              // NULL code pointer: an exception is pending.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    br x9
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // Each path must pop the frame exactly once.
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * |  caller method... |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D15               |    callee save
 * | D14               |    callee save
 * | D13               |    callee save
 * | D12               |    callee save
 * | D11               |    callee save
 * | D10               |    callee save
 * | D9                |    callee save
 * | D8                |    callee save
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | X0/Method*        | <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | SIRT size         | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------# <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        | <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call.
     */
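// Handshake with artQuickGenericJniTrampoline, as implied by the code below:
// on error it returns a negative value in x0; on success, x0 is the number of
// bytes of the 5KB scratch reservation to release, after which the native code
// pointer sits at the released position, followed by x0-x7 and d0-d7 to load
// for the native call (136 bytes in total).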
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]            // Store native ArtMethod* to bottom of stack.

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the SIRT is inserted.
    mov xFP, sp

    mov x8, #5120               // Reserve 5KB of scratch space.
    sub sp, sp, x8

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*, SP)
    //    x0     x1   <= C calling convention
    //  xSELF   xFP   <= where they are

    mov x0, xSELF               // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline // (Thread*, sp)

    // Get the updated pointer. This is the bottom of the frame _with_ SIRT.
    ldr xFP, [sp]
    add x9, sp, #8

    cmp x0, #0
    b.mi .Lentry_error          // Check for error, negative value.

    // release part of the alloca.
    add x9, x9, x0

    // Get the code pointer
    ldr xIP0, [x9, #0]

    // Load parameters from frame into registers.
    // TODO Check with artQuickGenericJniTrampoline.
    // Also, check again AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [x9, #8]
    ldp x2, x3, [x9, #24]
    ldp x4, x5, [x9, #40]
    ldp x6, x7, [x9, #56]

    ldp d0, d1, [x9, #72]
    ldp d2, d3, [x9, #88]
    ldp d4, d5, [x9, #104]
    ldp d6, d7, [x9, #120]

    add sp, x9, #136

    blr xIP0                    // native call.

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, SP, result, result_f)
    //   x0      x1    x2       x3    <= C calling convention
    mov x5, x0                  // Save return value
    mov x0, xSELF               // Thread register
    mov x1, xFP                 // Stack pointer
    mov x2, x5                  // Result (from saved)
    fmov x3, d0                 // d0 will contain floating point result, but needs to go into x3

    bl artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // Pending exceptions possible.
    ldr x1, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x1, .Lexception_in_native

    // Tear down the callee-save frame.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    // store into fpr, for when it's a fpr return...
    fmov d0, x0
    ret

.Lentry_error:
    mov sp, x28
    .cfi_def_cfa_register sp
    ldr xSELF, [x28, #200]
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline

/*
 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
 * of a quick call:
 * x0 = method being called/to bridge to.
 * x1..x7, d0..d7 = arguments to that method.
 */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME    // Set up frame and save arguments.

    // x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF                           // pass Thread::Current (held in xSELF).
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.

    fmov d0, x0                             // Copy result into d0 as well, in case of FP return.

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

UNIMPLEMENTED art_quick_instrumentation_entry
UNIMPLEMENTED art_quick_instrumentation_exit
UNIMPLEMENTED art_quick_deoptimize
UNIMPLEMENTED art_quick_mul_long
UNIMPLEMENTED art_quick_shl_long
UNIMPLEMENTED art_quick_shr_long
UNIMPLEMENTED art_quick_ushr_long
UNIMPLEMENTED art_quick_indexof
UNIMPLEMENTED art_quick_string_compareto