/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                        // x9 = Runtime::instance_ (art::Runtime*).

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kSaveAll].
    ldr x9, [x9, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #368
    .cfi_adjust_cfa_offset 368

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 368)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif
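
    // For reference (not part of the build check): the 368 bytes decompose as
    // 8 (ArtMethod* slot) + 8*8 (FP args d0-d7) + 24*8 (d8-d31)
    // + 13*8 (x18/xSELF, x19-x28, x29/xFP, x30/xLR) = 8 + 64 + 192 + 104 = 368.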

    // FP args
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // FP callee-saves
    stp d8, d9, [sp, #72]
    stp d10, d11, [sp, #88]
    stp d12, d13, [sp, #104]
    stp d14, d15, [sp, #120]

    stp d16, d17, [sp, #136]
    stp d18, d19, [sp, #152]
    stp d20, d21, [sp, #168]
    stp d22, d23, [sp, #184]
    stp d24, d25, [sp, #200]
    stp d26, d27, [sp, #216]
    stp d28, d29, [sp, #232]
    stp d30, d31, [sp, #248]


    // Callee saved.
    stp xSELF, x19, [sp, #264]
    .cfi_rel_offset x18, 264
    .cfi_rel_offset x19, 272

    stp x20, x21, [sp, #280]
    .cfi_rel_offset x20, 280
    .cfi_rel_offset x21, 288

    stp x22, x23, [sp, #296]
    .cfi_rel_offset x22, 296
    .cfi_rel_offset x23, 304

    stp x24, x25, [sp, #312]
    .cfi_rel_offset x24, 312
    .cfi_rel_offset x25, 320

    stp x26, x27, [sp, #328]
    .cfi_rel_offset x26, 328
    .cfi_rel_offset x27, 336

    stp x28, xFP, [sp, #344]            // Save FP.
    .cfi_rel_offset x28, 344
    .cfi_rel_offset x29, 352

    str xLR, [sp, #360]
    .cfi_rel_offset x30, 360

    // Store the appropriate callee-save method.
    str x9, [sp]                        // ArtMethod* Runtime::callee_save_methods_[kSaveAll]

.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
// WIP.
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                        // x9 = Runtime::instance_ (art::Runtime*).

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kRefsOnly].
    ldr x9, [x9, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 176)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif
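
    // For reference: 8 (ArtMethod* slot) + 8*8 (FP callee-saves d8-d15)
    // + 13*8 (x18-x30) = 8 + 64 + 104 = 176, matching the check above.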

    // FP callee-saves
    stp d8, d9, [sp, #8]
    stp d10, d11, [sp, #24]
    stp d12, d13, [sp, #40]
    stp d14, d15, [sp, #56]

    // Callee saved.
    stp xSELF, x19, [sp, #72]
    .cfi_rel_offset x18, 72
    .cfi_rel_offset x19, 80

    stp x20, x21, [sp, #88]
    .cfi_rel_offset x20, 88
    .cfi_rel_offset x21, 96

    stp x22, x23, [sp, #104]
    .cfi_rel_offset x22, 104
    .cfi_rel_offset x23, 112

    stp x24, x25, [sp, #120]
    .cfi_rel_offset x24, 120
    .cfi_rel_offset x25, 128

    stp x26, x27, [sp, #136]
    .cfi_rel_offset x26, 136
    .cfi_rel_offset x27, 144

    stp x28, xFP, [sp, #152]            // Save FP.
    .cfi_rel_offset x28, 152
    .cfi_rel_offset x29, 160

    str xLR, [sp, #168]
    .cfi_rel_offset x30, 168

    // Store the appropriate callee-save method.
    str x9, [sp]                        // ArtMethod* Runtime::callee_save_methods_[kRefsOnly]
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    // FP callee saves
    ldp d8, d9, [sp, #8]
    ldp d10, d11, [sp, #24]
    ldp d12, d13, [sp, #40]
    ldp d14, d15, [sp, #56]

    // Callee saved.
    ldp xSELF, x19, [sp, #72]
    .cfi_restore x18
    .cfi_restore x19

    ldp x20, x21, [sp, #88]
    .cfi_restore x20
    .cfi_restore x21

    ldp x22, x23, [sp, #104]
    .cfi_restore x22
    .cfi_restore x23

    ldp x24, x25, [sp, #120]
    .cfi_restore x24
    .cfi_restore x25

    ldp x26, x27, [sp, #136]
    .cfi_restore x26
    .cfi_restore x27

    ldp x28, xFP, [sp, #152]            // Restore FP.
    .cfi_restore x28
    .cfi_restore x29

    ldr xLR, [sp, #168]
    .cfi_restore x30

    add sp, sp, #176
    .cfi_adjust_cfa_offset -176
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    brk 0
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #304
    .cfi_adjust_cfa_offset 304

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 304)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif
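
    // For reference: 16 (ArtMethod* slot + padding) + 16*8 (d0-d15)
    // + 20*8 (x1-x7 and x18-x30) = 16 + 128 + 160 = 304, matching the check above.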

    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]
    stp d8, d9, [sp, #80]
    stp d10, d11, [sp, #96]
    stp d12, d13, [sp, #112]
    stp d14, d15, [sp, #128]

    stp x1, x2, [sp, #144]
    .cfi_rel_offset x1, 144
    .cfi_rel_offset x2, 152

    stp x3, x4, [sp, #160]
    .cfi_rel_offset x3, 160
    .cfi_rel_offset x4, 168

    stp x5, x6, [sp, #176]
    .cfi_rel_offset x5, 176
    .cfi_rel_offset x6, 184

    stp x7, xSELF, [sp, #192]
    .cfi_rel_offset x7, 192
    .cfi_rel_offset x18, 200

    stp x19, x20, [sp, #208]
    .cfi_rel_offset x19, 208
    .cfi_rel_offset x20, 216

    stp x21, x22, [sp, #224]
    .cfi_rel_offset x21, 224
    .cfi_rel_offset x22, 232

    stp x23, x24, [sp, #240]
    .cfi_rel_offset x23, 240
    .cfi_rel_offset x24, 248

    stp x25, x26, [sp, #256]
    .cfi_rel_offset x25, 256
    .cfi_rel_offset x26, 264

    stp x27, x28, [sp, #272]
    .cfi_rel_offset x27, 272
    .cfi_rel_offset x28, 280

    stp xFP, xLR, [sp, #288]
    .cfi_rel_offset x29, 288
    .cfi_rel_offset x30, 296
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp x9, :got:_ZN3art7Runtime9instance_E
    ldr x9, [x9, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr x9, [x9]                        // x9 = Runtime::instance_ (art::Runtime*).

    // x9 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kRefsAndArgs].
    ldr x9, [x9, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str x9, [sp]                        // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs]
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0

    ldr d1, [sp, #24]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]
    ldp d8, d9, [sp, #80]
    ldp d10, d11, [sp, #96]
    ldp d12, d13, [sp, #112]
    ldp d14, d15, [sp, #128]

    // args.
    ldp x1, x2, [sp, #144]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #160]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #176]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, xSELF, [sp, #192]
    .cfi_restore x7
    .cfi_restore x18

    ldp x19, x20, [sp, #208]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #224]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #240]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #256]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #272]
    .cfi_restore x27
    .cfi_restore x28

    ldp xFP, xLR, [sp, #288]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #304
    .cfi_adjust_cfa_offset -304
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f                         // result non-zero branch over
    ret                                 // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f                          // result zero branch over
    ret                                 // return
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode    // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                                   // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, #THREAD_EXCEPTION_OFFSET]     // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x9
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f                         // result non-zero branch over
    ret                                 // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x0, xSELF                       // pass Thread::Current
    mov x1, sp                          // pass SP
    b \cxx_name                         // \cxx_name(Thread*, SP)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context.
    mov x1, xSELF                       // pass Thread::Current.
    mov x2, sp                          // pass SP.
    b \cxx_name                         // \cxx_name(arg, Thread*, SP).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    b \cxx_name                         // \cxx_name(arg1, arg2, Thread*, SP)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return NULL/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers x12.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME    // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    ldr x2, [sp, #FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE]     // pass caller Method*
    mov x3, xSELF                       // pass Thread::Current
    mov x4, sp
    bl \cxx_name                        // (method_idx, this, caller, Thread*, SP)
    mov x12, x1                         // save Method*->code_
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                          // did we find the target? if not go to exception delivery
    br x12                              // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=5*8                           // x4, x5, SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8

    mov x9, sp                          // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD  // calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, #~0xf                 // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x9, [x10, #32]                  // Save old stack pointer.
    .cfi_rel_offset sp, 32

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                      // Destination address is bottom of stack + space for the NULL Method*.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4                      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into Method* at bottom of frame.
    str xzr, [sp]

.endm

.macro INVOKE_STUB_CALL_AND_RETURN

    // Load method->code_ from METHOD_QUICK_CODE_OFFSET.
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldr x2, [x29, #32]                  // Restore stack pointer.
    mov sp, x2
    .cfi_restore sp

    ldp x29, x30, [x29]                 // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 * extern "C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t *args,      x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char *shorty);       x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod* NULL      | <- SP
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
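/*
 * Worked example of the register-filling loop below (illustrative only, not
 * from the original source): for a virtual method "double f(int, long, float)"
 * the shorty is "DIJF". The leading 'D' (return type) is skipped, "this" goes
 * into w1, then the 'I' lands in w2, the 'J' in x3 and the 'F' in s0; all
 * remaining argument registers stay untouched.
 */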
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1             // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4            // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction     // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'               // is this a float?
    bne .LisDouble

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15           // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'               // is this a double?
    bne .LisLong

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15           // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'               // is this a long?
    bne .LisOther

    cmp x8, #6*12               // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8            // Calculate subroutine to jump to.
    br x17

.LisOther:                      // Everything else takes one vReg.
    cmp x8, #6*12               // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8            // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
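
// Note on the constant 12: each LOADREG expansion is exactly three 4-byte
// instructions (ldr, add, b), so advancing the counter by 12 selects the next
// entry of the jump tables below, and the "registers full" checks above
// compare the counter against (number of table entries) * 12.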

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/* extern "C"
 * void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                   uint32_t *args,      x1
 *                                   uint32_t argsize,    w2
 *                                   Thread *self,        x3
 *                                   JValue *result,      x4
 *                                   char *shorty);       x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1             // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction2    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'               // is this a float?
    bne .LisDouble2

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15           // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'               // is this a double?
    bne .LisLong2

    cmp x15, #8*12              // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15           // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'               // is this a long?
    bne .LisOther2

    cmp x8, #7*12               // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8            // Calculate subroutine to jump to.
    br x17

.LisOther2:                     // Everything else takes one vReg.
    cmp x8, #7*12               // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8            // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */

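    // Assumed gprs_ layout (inferred from the loads below): gprs_[0..30] hold
    // x0..x30 and gprs_[31] holds SP, so starting at &gprs_[30] and walking
    // down, the first ldp fetches LR (x30) and the target SP (staged in x1).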
ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure.
     */
    .extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // Save callee saves in case exception allocation triggers GC.
    mov x2, xSELF                       // Pass Thread::Current.
    mov x3, sp                          // Pass SP.
    bl artHandleFillArrayDataFromCode   // (Array*, const DexFile::Payload*, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_ZERO
    DELIVER_PENDING_EXCEPTION
END art_quick_handle_fill_data

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
    .extern artLockObjectFromCode
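    // Thin lock word layout assumed by the fast path below (it matches the
    // instructions used): bits 31-30 state (00 = thin or unlocked),
    // bits 29-16 recursive lock count, bits 15-0 owner thread id. Hence
    // "add ..., #65536" bumps the count and "lsr ..., 30" tests the state.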
ENTRY art_quick_lock_object
    cbz w0, .Lslow_lock
    add x4, x0, #LOCK_WORD_OFFSET       // exclusive load/store has no immediate form, so compute the address
.Lretry_lock:
    ldr w2, [xSELF, #THREAD_ID_OFFSET]  // TODO: Can the thread ID really change during the loop?
    ldxr w1, [x4]
    cbnz w1, .Lnot_unlocked             // already thin locked
    stxr w3, w2, [x4]
    cbnz w3, .Lstrex_fail               // store failed, retry
    dmb ishld                           // acquire (LoadLoad|LoadStore) memory barrier
    ret
.Lstrex_fail:
    b .Lretry_lock                      // unlikely forward branch, need to reload and recheck w1/w2
.Lnot_unlocked:
    lsr w3, w1, 30
    cbnz w3, .Lslow_lock                // if either of the top two bits are set, go slow path
    eor w2, w1, w2                      // lock_word.ThreadId() ^ self->ThreadId()
    uxth w2, w2                         // zero top 16 bits
    cbnz w2, .Lslow_lock                // thread ids don't match -> contention, go slow path
                                        // else we already hold the lock: recursive lock
    add w2, w1, #65536                  // increment count in lock word, placing result in w2 for storing
    lsr w1, w2, 30                      // if either of the top two bits are set, we overflowed.
    cbnz w1, .Lslow_lock                // if we overflowed the count go slow path
    str w2, [x0, #LOCK_WORD_OFFSET]     // no need for stxr as we hold the lock
    ret
.Lslow_lock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case we block
    mov x1, xSELF                       // pass Thread::Current
    mov x2, sp                          // pass SP
    bl artLockObjectFromCode            // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * x0 holds the possibly null object to unlock.
     *
     * Derived from arm32 code.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz x0, .Lslow_unlock
    ldr w1, [x0, #LOCK_WORD_OFFSET]
    lsr w2, w1, 30
    cbnz w2, .Lslow_unlock              // if either of the top two bits are set, go slow path
    ldr w2, [xSELF, #THREAD_ID_OFFSET]
    eor w3, w1, w2                      // lock_word.ThreadId() ^ self->ThreadId()
    uxth w3, w3                         // zero top 16 bits
    cbnz w3, .Lslow_unlock              // lock word and self thread id must match, else go slow path
    cmp w1, #65536
    bpl .Lrecursive_thin_unlock
    // transition to unlocked, w3 holds 0
    dmb ish                             // full (LoadStore|StoreStore) memory barrier
    str w3, [x0, #LOCK_WORD_OFFSET]
    ret
.Lrecursive_thin_unlock:
    sub w1, w1, #65536
    str w1, [x0, #LOCK_WORD_OFFSET]
    ret
.Lslow_unlock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case exception allocation triggers GC
    mov x1, xSELF                       // pass Thread::Current
    mov x2, sp                          // pass SP
    bl artUnlockObjectFromCode          // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register
    sub sp, sp, #32                     // Stack needs to be 16B aligned on calls
    .cfi_adjust_cfa_offset 32
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp xSELF, xLR, [sp, #16]
    .cfi_rel_offset x18, 16
    .cfi_rel_offset x30, 24

    // Call runtime code
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32
    ret

.Lthrow_class_cast_exception:
    // Restore
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    b artThrowClassCastException        // (Class*, Class*, Thread*, SP)
    brk 0                               // We should not return here...
END art_quick_check_cast

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * x0 = array, x1 = index, x2 = value
     *
     * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
     * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
     * using index-zero-extension in load/stores.
     *
     * Temporaries: x3, x4
     * TODO: x4 OK? ip seems wrong here.
     */
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst x0, x0                          // Null check: set flags from x0 ...
    bne art_quick_aput_obj_with_bound_check    // ... and continue if non-null.
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr w3, [x0, #ARRAY_LENGTH_OFFSET]
    cmp w3, w1                          // Bounds check: length > index (unsigned) ...
    bhi art_quick_aput_obj              // ... means the store is in range.
    mov x0, x1                          // Out of bounds: pass index ...
    mov x1, x3                          // ... and length to the bounds throw.
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check

ENTRY art_quick_aput_obj
    cbz x2, .Ldo_aput_null
    ldr w3, [x0, #CLASS_OFFSET]                 // Heap reference = 32b
                                                // This also zero-extends to x3
    ldr w4, [x2, #CLASS_OFFSET]                 // Heap reference = 32b
                                                // This also zero-extends to x4
    ldr w3, [x3, #CLASS_COMPONENT_TYPE_OFFSET]  // Heap reference = 32b
                                                // This also zero-extends to x3
    cmp w3, w4                          // value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]            // Heap reference = 32b
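    // Card mark (a sketch of the intent): with 128-byte (1 << 7) cards, dirty
    // card_table[x0 >> 7]. ART biases the card table so that the low byte of
    // its base address is itself the dirty value, hence storing w3 suffices.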
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Ldo_aput_null:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]            // Heap reference = 32b
    ret
.Lcheck_assignability:
    // Store arguments and link register
    sub sp, sp, #48                     // Stack needs to be 16B aligned on calls
    .cfi_adjust_cfa_offset 48
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, xSELF, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x18, 24
    str xLR, [sp, #32]
    .cfi_rel_offset x30, 32

    // Call runtime code
    mov x0, x3                          // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    mov x1, x4                          // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_array_store_exception

    // Restore
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]            // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Lthrow_array_store_exception:
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x1, x2                          // Pass value.
    mov x2, xSELF                       // Pass Thread::Current.
    mov x3, sp                          // Pass SP.
    b artThrowArrayStoreException       // (Object*, Object*, Thread*, SP).
    brk 0                               // Unreached.
END art_quick_aput_obj

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case of GC
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    bl \entrypoint                      // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case of GC
    mov x3, xSELF                       // pass Thread::Current
    mov x4, sp                          // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macros that exploit code similarities for downcalls with a referrer.

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case of GC
    ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]     // Load referrer
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    bl \entrypoint                      // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY
.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case of GC
    ldr x2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]     // Load referrer
    mov x3, xSELF                       // pass Thread::Current
    mov x4, sp                          // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// TODO: xSELF -> x19. Temporarily rely on xSELF being saved in REF_ONLY
.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case of GC
    ldr x3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]     // Load referrer
    mov x4, xSELF                       // pass Thread::Current
    mov x5, sp                          // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

    /*
     * Entry from managed code when the static storage for a class is uninitialized. This stub
     * runs the class initializer and delivers the exception on error. On success the static
     * storage base is returned.
     */
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO

UNIMPLEMENTED art_quick_initialize_type
UNIMPLEMENTED art_quick_initialize_type_and_verify_access

ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

// This is separated out as the argument order is different.
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME    // save callee saves in case of GC
    mov x3, x1                          // Stash the value while we load the referrer into x1.
    ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]     // Load referrer
    mov x2, x3                          // Move the value into place as the third argument.
    mov x3, xSELF                       // pass Thread::Current
    mov x4, sp                          // pass SP
    bl artSet64StaticFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_set64_static

    /*
     * Entry from managed code to resolve a string. This stub will allocate a String and deliver an
     * exception on error. On success the String is returned. x0 holds the referring method,
     * w1 holds the string index. The fast path check for hit in strings cache has already been
     * performed.
     */
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

UNIMPLEMENTED art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver. The frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]                    // place proxy method at bottom of frame
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    bl artQuickProxyInvokeHandler       // (Method* proxy method, receiver, Thread*, SP)
    ldr xSELF, [sp, #200]               // Restore self pointer.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy       // branch if an exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME_NO_D0    // keep d0
    ret                                 // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict. x12 is a hidden argument that holds the target method's
     * dex method index.
     */
ENTRY art_quick_imt_conflict_trampoline
    ldr x0, [sp, #0]                                // load caller Method*
    ldr w0, [x0, #METHOD_DEX_CACHE_METHODS_OFFSET]  // load dex_cache_resolved_methods
    add x0, x0, #OBJECT_ARRAY_DATA_OFFSET           // get starting address of data
    ldr w0, [x0, x12, lsl 2]                        // load the target method
    b art_quick_invoke_interface_trampoline
END art_quick_imt_conflict_trampoline

ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline     // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov x9, x0                          // Remember returned code pointer in x9.
    ldr x0, [sp, #0]                    // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    br x9
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------# <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            | callee save
 * | X28               | callee save
 * | X27               | callee save
 * | X26               | callee save
 * | X25               | callee save
 * | X24               | callee save
 * | X23               | callee save
 * | X22               | callee save
 * | X21               | callee save
 * | X20               | callee save
 * | X19               | callee save
 * | X7                | arg7
 * | X6                | arg6
 * | X5                | arg5
 * | X4                | arg4
 * | X3                | arg3
 * | X2                | arg2
 * | X1                | arg1
 * | D15               | callee save
 * | D14               | callee save
 * | D13               | callee save
 * | D12               | callee save
 * | D11               | callee save
 * | D10               | callee save
 * | D9                | callee save
 * | D8                | callee save
 * | D7                | float arg 8
 * | D6                | float arg 7
 * | D5                | float arg 6
 * | D4                | float arg 5
 * | D3                | float arg 4
 * | D2                | float arg 3
 * | D1                | float arg 2
 * | D0                | float arg 1
 * | X0/Method*        | <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | handle scope size | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------# <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        | <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]                    // Store native ArtMethod* to bottom of stack.

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the handle scope is inserted.
    mov xFP, sp

    mov x8, #5120
    sub sp, sp, x8

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*, SP)
    //    x0      x1   <= C calling convention
    //  xSELF    xFP   <= where they are

    mov x0, xSELF                       // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline     // (Thread*, sp)

    // Get the updated pointer. This is the bottom of the frame _with_ handle scope.
    ldr xFP, [sp]
    add x9, sp, #8

    cmp x0, #0
    b.mi .Lentry_error                  // Check for error, negative value.

    // release part of the alloca.
    add x9, x9, x0

    // Get the code pointer
    ldr xIP0, [x9, #0]

    // Load parameters from frame into registers.
    // TODO Check with artQuickGenericJniTrampoline.
    // Also, check again AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [x9, #8]
    ldp x2, x3, [x9, #24]
    ldp x4, x5, [x9, #40]
    ldp x6, x7, [x9, #56]

    ldp d0, d1, [x9, #72]
    ldp d2, d3, [x9, #88]
    ldp d4, d5, [x9, #104]
    ldp d6, d7, [x9, #120]

    add sp, x9, #136

    blr xIP0                            // native call.

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, SP, result, result_f)
    //   x0      x1   x2      x3        <= C calling convention
    mov x5, x0                          // Save return value
    mov x0, xSELF                       // Thread register
    mov x1, xFP                         // Stack pointer
    mov x2, x5                          // Result (from saved)
    fmov x3, d0                         // d0 will contain floating point result, but needs to go into x3

    bl artQuickGenericJniEndTrampoline

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Restore self pointer.
    ldr xSELF, [x28, #200]

    // Pending exceptions possible.
    ldr x1, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x1, .Lexception_in_native

    // Tear down the callee-save frame.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME

    // store into fpr, for when it's a fpr return...
    fmov d0, x0
    ret

.Lentry_error:
    mov sp, x28
    .cfi_def_cfa_register sp
    ldr xSELF, [x28, #200]
.Lexception_in_native:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION

END art_quick_generic_jni_trampoline

/*
 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
 * of a quick call:
 * x0 = method being called/to bridge to.
 * x1..x7, d0..d7 = arguments to that method.
 */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME    // Set up frame and save arguments.

    // x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF                       // How to get Thread::Current() ???
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.

    fmov d0, x0

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

UNIMPLEMENTED art_quick_instrumentation_entry
UNIMPLEMENTED art_quick_instrumentation_exit
UNIMPLEMENTED art_quick_deoptimize
UNIMPLEMENTED art_quick_mul_long
UNIMPLEMENTED art_quick_shl_long
UNIMPLEMENTED art_quick_shr_long
UNIMPLEMENTED art_quick_ushr_long
UNIMPLEMENTED art_quick_indexof

    /*
     * String's compareTo.
     *
     * TODO: Not very optimized.
     *
     * On entry:
     *    x0: this object pointer
     *    x1: comp object pointer
     *
     */
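    /*
     * Result convention implemented below: if the strings differ within the
     * first min(count1, count2) chars, x0 is the difference of the first pair
     * of chars that differ; otherwise x0 is count1 - count2. Zero means equal.
     */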
    .extern __memcmp16
ENTRY art_quick_string_compareto
    mov x2, x0                          // x0 is return, use x2 for first input.
    sub x0, x2, x1                      // Same string object?
    cbnz x0, 1f
    ret
1:                                      // Different string objects.

    ldr w6, [x2, #STRING_OFFSET_OFFSET]
    ldr w5, [x1, #STRING_OFFSET_OFFSET]
    ldr w4, [x2, #STRING_COUNT_OFFSET]
    ldr w3, [x1, #STRING_COUNT_OFFSET]
    ldr w2, [x2, #STRING_VALUE_OFFSET]
    ldr w1, [x1, #STRING_VALUE_OFFSET]

    /*
     * Now:           CharArray*    Offset   Count
     *    first arg       x2          w6       w4
     *   second arg       x1          w5       w3
     */

    // x0 := str1.length(w4) - str2.length(w3). ldr zero-extended w3/w4 into x3/x4.
    subs x0, x4, x3
    // Min(count1, count2) into w3.
    csel x3, x3, x4, ge

    // Build pointer into string data.

    // Add offset in array (substr etc.) (sign extend and << 1).
    add x2, x2, w6, sxtw #1
    add x1, x1, w5, sxtw #1

    // Add offset in CharArray to array.
    add x2, x2, #STRING_DATA_OFFSET
    add x1, x1, #STRING_DATA_OFFSET

    // Check for long string, do memcmp16 for them.
    cmp w3, #28                         // Constant from arm32.
    bgt .Ldo_memcmp16

    /*
     * Now:
     *   x2: *first string data
     *   x1: *second string data
     *   w3: iteration count
     *   x0: return value if comparison equal
     *   x4, x5, x6, x7: free
     */

    // Do a simple unrolled loop.
.Lloop:
    // At least two more elements?
    subs w3, w3, #2
    b.lt .Lremainder_or_done

    ldrh w4, [x2], #2
    ldrh w5, [x1], #2

    ldrh w6, [x2], #2
    ldrh w7, [x1], #2

    subs w4, w4, w5
    b.ne .Lw4_result

    subs w6, w6, w7
    b.ne .Lw6_result

    b .Lloop

.Lremainder_or_done:
    adds w3, w3, #1
    b.eq .Lremainder
    ret

.Lremainder:
    ldrh w4, [x2], #2
    ldrh w5, [x1], #2
    subs w4, w4, w5
    b.ne .Lw4_result
    ret

// Result is in w4
.Lw4_result:
    sxtw x0, w4
    ret

// Result is in w6
.Lw6_result:
    sxtw x0, w6
    ret

.Ldo_memcmp16:
    str x0, [sp, #-16]!                 // Save x0

    mov x0, x2
    uxtw x2, w3
    bl __memcmp16

    ldr x1, [sp], #16                   // Restore old x0 = length diff

    cmp x0, #0                          // Check the memcmp difference
    csel x0, x0, x1, ne                 // x0 := x0 != 0 ? x0 : x1
    ret
END art_quick_string_compareto