/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll)
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll] .
    THIS_LOAD_REQUIRES_READ_BARRIER
    ldr xIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 176)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP callee-saves.
    stp d8, d9,   [sp, #8]
    stp d10, d11, [sp, #24]
    stp d12, d13, [sp, #40]
    stp d14, d15, [sp, #56]

    // Reserved registers.
    stp xSELF, xSUSPEND, [sp, #72]
    .cfi_rel_offset x18, 72
    .cfi_rel_offset x19, 80

    // Callee-saves.
    stp x20, x21, [sp, #88]
    .cfi_rel_offset x20, 88
    .cfi_rel_offset x21, 96

    stp x22, x23, [sp, #104]
    .cfi_rel_offset x22, 104
    .cfi_rel_offset x23, 112

    stp x24, x25, [sp, #120]
    .cfi_rel_offset x24, 120
    .cfi_rel_offset x25, 128

    stp x26, x27, [sp, #136]
    .cfi_rel_offset x26, 136
    .cfi_rel_offset x27, 144

    stp x28, x29, [sp, #152]
    .cfi_rel_offset x28, 152
    .cfi_rel_offset x29, 160

    str xLR, [sp, #168]
    .cfi_rel_offset x30, 168

    // Store the ArtMethod* for this callee-save frame.
    str xIP0, [sp]    // Runtime::callee_save_methods_[kSaveAll].
.endm
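
    /*
     * Resulting kSaveAll frame layout (176 bytes, summarized from the stores above):
     *   [sp, #0]    ArtMethod*   (Runtime::callee_save_methods_[kSaveAll])
     *   [sp, #8]    d8 - d15     (FP callee-saves)
     *   [sp, #72]   xSELF (x18), xSUSPEND (x19)
     *   [sp, #88]   x20 - x29    (callee-saves)
     *   [sp, #168]  xLR (x30)
     */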

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REF_ONLY_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly] .
    THIS_LOAD_REQUIRES_READ_BARRIER
    ldr xIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #96
    .cfi_adjust_cfa_offset 96

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 96)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Callee-saves.
    stp x20, x21, [sp, #8]
    .cfi_rel_offset x20, 8
    .cfi_rel_offset x21, 16

    stp x22, x23, [sp, #24]
    .cfi_rel_offset x22, 24
    .cfi_rel_offset x23, 32

    stp x24, x25, [sp, #40]
    .cfi_rel_offset x24, 40
    .cfi_rel_offset x25, 48

    stp x26, x27, [sp, #56]
    .cfi_rel_offset x26, 56
    .cfi_rel_offset x27, 64

    stp x28, x29, [sp, #72]
    .cfi_rel_offset x28, 72
    .cfi_rel_offset x29, 80

    // LR.
    str xLR, [sp, #88]
    .cfi_rel_offset x30, 88

    // Save xSELF to xETR.
    mov xETR, xSELF

    // Store the ArtMethod* for this callee-save frame.
    str xIP0, [sp]    // Runtime::callee_save_methods_[kRefsOnly].
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    // Restore xSELF.
    mov xSELF, xETR

    // Callee-saves.
    ldp x20, x21, [sp, #8]
    .cfi_restore x20
    .cfi_restore x21

    ldp x22, x23, [sp, #24]
    .cfi_restore x22
    .cfi_restore x23

    ldp x24, x25, [sp, #40]
    .cfi_restore x24
    .cfi_restore x25

    ldp x26, x27, [sp, #56]
    .cfi_restore x26
    .cfi_restore x27

    ldp x28, x29, [sp, #72]
    .cfi_restore x28
    .cfi_restore x29

    // LR.
    ldr xLR, [sp, #88]
    .cfi_restore x30

    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro POP_REF_ONLY_CALLEE_SAVE_FRAME
    // Restore xSELF as it might be scratched.
    mov xSELF, xETR
    // ETR.
    ldr xETR, [sp, #16]
    .cfi_restore x21

    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #224
    .cfi_adjust_cfa_offset 224

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 224)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP args.
    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]

    // Core args and x20 (callee-save).
    stp x1, x2, [sp, #80]
    .cfi_rel_offset x1, 80
    .cfi_rel_offset x2, 88

    stp x3, x4, [sp, #96]
    .cfi_rel_offset x3, 96
    .cfi_rel_offset x4, 104

    stp x5, x6, [sp, #112]
    .cfi_rel_offset x5, 112
    .cfi_rel_offset x6, 120

    stp x7, x20, [sp, #128]
    .cfi_rel_offset x7, 128
    .cfi_rel_offset x20, 136

    // Callee-saves.
    stp x21, x22, [sp, #144]
    .cfi_rel_offset x21, 144
    .cfi_rel_offset x22, 152

    stp x23, x24, [sp, #160]
    .cfi_rel_offset x23, 160
    .cfi_rel_offset x24, 168

    stp x25, x26, [sp, #176]
    .cfi_rel_offset x25, 176
    .cfi_rel_offset x26, 184

    stp x27, x28, [sp, #192]
    .cfi_rel_offset x27, 192
    .cfi_rel_offset x28, 200

    // x29 (callee-save) and LR.
    stp x29, xLR, [sp, #208]
    .cfi_rel_offset x29, 208
    .cfi_rel_offset x30, 216

    // Save xSELF to xETR.
    mov xETR, xSELF
.endm
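
    /*
     * Resulting kRefsAndArgs frame layout (224 bytes, summarized from the stores above):
     *   [sp, #0]    ArtMethod*   (stored by the users of this macro)
     *   [sp, #16]   d0 - d7      (FP argument registers)
     *   [sp, #80]   x1 - x7      (core argument registers), x20
     *   [sp, #144]  x21 - x28    (callee-saves)
     *   [sp, #208]  x29, xLR (x30)
     */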

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs] .
    THIS_LOAD_REQUIRES_READ_BARRIER
    ldr xIP0, [xIP0, RUNTIME_REF_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str xIP0, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    // Restore xSELF.
    mov xSELF, xETR

    // FP args.
    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]

    // Core args and x20 (callee-save).
    ldp x1, x2, [sp, #80]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #96]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #112]
    .cfi_restore x5
    .cfi_restore x6

    ldp x7, x20, [sp, #128]
    .cfi_restore x7
    .cfi_restore x20

    // Callee-saves.
    ldp x21, x22, [sp, #144]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #160]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #176]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #192]
    .cfi_restore x27
    .cfi_restore x28

    // x29 (callee-save) and LR.
    ldp x29, xLR, [sp, #208]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #224
    .cfi_adjust_cfa_offset -224
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f        // result non-zero branch over
    ret                // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f         // result zero branch over
    ret                // return
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF
    mov x1, sp

    // Point of no return.
    b artDeliverPendingExceptionFromCode   // artDeliverPendingExceptionFromCode(Thread*, SP)
    brk 0                                  // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, #THREAD_EXCEPTION_OFFSET]   // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG xIP0
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f        // result non-zero branch over
    ret                // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME   // save all registers as basis for long jump context
    mov x0, xSELF                      // pass Thread::Current
    mov x1, sp                         // pass SP
    b \cxx_name                        // \cxx_name(Thread*, SP)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME   // save all registers as basis for long jump context.
    mov x1, xSELF                      // pass Thread::Current.
    mov x2, sp                         // pass SP.
    b \cxx_name                        // \cxx_name(arg, Thread*, SP).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME   // save all registers as basis for long jump context
    mov x2, xSELF                      // pass Thread::Current
    mov x3, sp                         // pass SP
    b \cxx_name                        // \cxx_name(arg1, arg2, Thread*, SP)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return NULL/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers xIP0.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME  // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    ldr w2, [sp, #FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE]  // pass caller Method*
    mov x3, xSELF                          // pass Thread::Current
    mov x4, sp
    bl \cxx_name                           // (method_idx, this, caller, Thread*, SP)
    mov xIP0, x1                           // save Method*->code_
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                             // did we find the target? if not go to exception delivery
    br xIP0                                // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=6*8   // x4, x5, xSUSPEND, SP, LR & FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+STACK_REFERENCE_SIZE


    mov x9, sp                             // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD     // calculate size of frame.
    sub x10, sp, x10                       // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, #~0xf                    // Enforce 16 byte stack alignment.
    mov sp, x10                            // Set new SP.

    sub x10, x9, #SAVE_SIZE                // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10              // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    stp x9, xSUSPEND, [x10, #32]           // Save old stack pointer and xSUSPEND
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]                 // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                    // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                           // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                          // Move thread pointer into SELF register.
    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL  // reset wSUSPEND to suspend check interval

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #4                         // Destination is bottom of stack + null StackReference<Method> slot.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store NULL into StackReference<Method>* at bottom of frame.
    str wzr, [sp]

#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(ARM64) size not as expected."
#endif
.endm
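
// Note: INVOKE_STUB_CREATE_FRAME and INVOKE_STUB_CALL_AND_RETURN assume the C ABI of the invoke
// stubs further down (see the art_quick_invoke_stub comment): x0 = ArtMethod*, x1 = args,
// w2 = argument area size in bytes, x3 = Thread* self, x4 = JValue* result, x5 = shorty.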

.macro INVOKE_STUB_CALL_AND_RETURN

    // load method-> METHOD_QUICK_CODE_OFFSET
    ldr x9, [x0, #METHOD_QUICK_CODE_OFFSET]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldp x2, xSUSPEND, [xFP, #32]   // Restore stack pointer and xSUSPEND.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]            // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        x19           | <- Used as wSUSPEND, won't be restored by managed code.
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | StackRef<ArtMethod>  | <- SP  value=null
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  wSUSPEND = suspend count
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1       // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4      // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1        // Load next character in signature, and increment.
    cbz w17, .LcallFunction    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'       // is this a float?
    bne .LisDouble

    cmp x15, # 8*12     // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15   // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'       // is this a double?
    bne .LisLong

    cmp x15, # 8*12     // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15   // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'       // is this a long?
    bne .LisOther

    cmp x8, # 6*12      // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8    // Calculate subroutine to jump to.
    br x17

.LisOther:              // Everything else takes one vReg.
    cmp x8, # 6*12      // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8    // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
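
// Each LOADREG expansion is exactly three 4-byte instructions (ldr/add/b), so successive entries
// in the .Lstore* tables below are 12 bytes apart. x8 (core regs) and x15 (FP regs) hold the byte
// offset of the next free register's table entry, which is why the dispatch above computes
// "add x17, <table base>, <offset>; br x17" and why the "all registers full" checks compare the
// offset against <number of available registers> * 12 (e.g. 6*12 for w2-w7 here, 8*12 for s/d0-7).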

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1     // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1         // Load next character in signature, and increment.
    cbz w17, .LcallFunction2    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'       // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12     // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15   // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'       // is this a double?
    bne .LisLong2

    cmp x15, # 8*12     // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15   // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'       // is this a long?
    bne .LisOther2

    cmp x8, # 7*12      // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8    // Calculate subroutine to jump to.
    br x17

.LisOther2:             // Everything else takes one vReg.
    cmp x8, # 7*12      // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8    // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_
     */

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
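    // Thin-lock fast path. The checks below imply the assumed lock word encoding: a non-zero
    // value in the top two bits means the word is not a thin/unlocked lock (forcing the slow
    // path), the low 16 bits hold the owning thread id, and the recursion count sits above them
    // (hence the #65536 increments).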
    cbz w0, .Lslow_lock
    add x4, x0, #LOCK_WORD_OFFSET  // exclusive load/store has no immediate offset form, so compute the address.
.Lretry_lock:
    ldr w2, [xSELF, #THREAD_ID_OFFSET]  // TODO: Can the thread ID really change during the loop?
    ldxr w1, [x4]
    cbnz w1, .Lnot_unlocked        // already thin locked
    stxr w3, w2, [x4]
    cbnz w3, .Lstrex_fail          // store failed, retry
    dmb ishld                      // full (LoadLoad|LoadStore) memory barrier
    ret
.Lstrex_fail:
    b .Lretry_lock                 // unlikely forward branch, need to reload and recheck w1/w2
.Lnot_unlocked:
    lsr w3, w1, 30
    cbnz w3, .Lslow_lock           // if either of the top two bits are set, go slow path
    eor w2, w1, w2                 // lock_word.ThreadId() ^ self->ThreadId()
    uxth w2, w2                    // zero top 16 bits
    cbnz w2, .Lslow_lock           // thread ids don't match -> contention, go slow path
                                   // else thread ids match -> recursive lock, fall through
    add w2, w1, #65536             // increment count in lock word, placing result in w2 for storing
    lsr w1, w2, 30                 // if either of the top two bits are set, we overflowed.
    cbnz w1, .Lslow_lock           // if we overflow the count go slow path
    str w2, [x0, #LOCK_WORD_OFFSET]  // no need for stxr as we hold the lock
    ret
.Lslow_lock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case we block
    mov x1, xSELF                  // pass Thread::Current
    mov x2, sp                     // pass SP
    bl artLockObjectFromCode       // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * x0 holds the possibly null object to unlock.
     *
     * Derived from arm32 code.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz x0, .Lslow_unlock
    ldr w1, [x0, #LOCK_WORD_OFFSET]
    lsr w2, w1, 30
    cbnz w2, .Lslow_unlock         // if either of the top two bits are set, go slow path
    ldr w2, [xSELF, #THREAD_ID_OFFSET]
    eor w3, w1, w2                 // lock_word.ThreadId() ^ self->ThreadId()
    uxth w3, w3                    // zero top 16 bits
    cbnz w3, .Lslow_unlock         // do lock word and self thread id's match?
    cmp w1, #65536
    bpl .Lrecursive_thin_unlock
    // transition to unlocked, w3 holds 0
    dmb ish                        // full (LoadStore|StoreStore) memory barrier
    str w3, [x0, #LOCK_WORD_OFFSET]
    ret
.Lrecursive_thin_unlock:
    sub w1, w1, #65536
    str w1, [x0, #LOCK_WORD_OFFSET]
    ret
.Lslow_unlock:
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case exception allocation triggers GC
    mov x1, xSELF                  // pass Thread::Current
    mov x2, sp                     // pass SP
    bl artUnlockObjectFromCode     // (Object* obj, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register
    sub sp, sp, #32                     // Stack needs to be 16b aligned on calls
    .cfi_adjust_cfa_offset 32
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp xSELF, xLR, [sp, #16]
    .cfi_rel_offset x18, 16
    .cfi_rel_offset x30, 24

    // Call runtime code
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32
    ret

.Lthrow_class_cast_exception:
    // Restore
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp xSELF, xLR, [sp, #16]
    .cfi_restore x18
    .cfi_restore x30
    add sp, sp, #32
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME    // save all registers as basis for long jump context
    mov x2, xSELF                       // pass Thread::Current
    mov x3, sp                          // pass SP
    b artThrowClassCastException        // (Class*, Class*, Thread*, SP)
    brk 0                               // We should not return here...
END art_quick_check_cast

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * x0 = array, x1 = index, x2 = value
     *
     * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
     * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
     * using index-zero-extension in load/stores.
     *
     * Temporaries: x3, x4
     * TODO: x4 OK? ip seems wrong here.
     */
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst x0, x0
    bne art_quick_aput_obj_with_bound_check
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr w3, [x0, #ARRAY_LENGTH_OFFSET]
    cmp w3, w1
    bhi art_quick_aput_obj
    mov x0, x1
    mov x1, x3
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check

ENTRY art_quick_aput_obj
    cbz x2, .Ldo_aput_null
    ldr w3, [x0, #CLASS_OFFSET]                          // Heap reference = 32b
                                                         // This also zero-extends to x3
    ldr w4, [x2, #CLASS_OFFSET]                          // Heap reference = 32b
                                                         // This also zero-extends to x4
    ldr w3, [x3, #CLASS_COMPONENT_TYPE_OFFSET]           // Heap reference = 32b
                                                         // This also zero-extends to x3
    cmp w3, w4  // value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]                             // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7                                       // Card index = array address >> 7.
    strb w3, [x3, x0]                                    // Mark the card for the stored-into array.
    ret
.Ldo_aput_null:
    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]                             // Heap reference = 32b
    ret
.Lcheck_assignability:
    // Store arguments and link register
    sub sp, sp, #48                     // Stack needs to be 16b aligned on calls
    .cfi_adjust_cfa_offset 48
    stp x0, x1, [sp]
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, xSELF, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x18, 24
    str xLR, [sp, #32]
    .cfi_rel_offset x30, 32

    // Call runtime code
    mov x0, x3              // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    mov x1, x4              // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_array_store_exception

    // Restore
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    add x3, x0, #OBJECT_ARRAY_DATA_OFFSET
    // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]                             // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7                                       // Card index = array address >> 7.
    strb w3, [x3, x0]                                    // Mark the card for the stored-into array.
    ret
.Lthrow_array_store_exception:
    ldp x0, x1, [sp]
    .cfi_restore x0
    .cfi_restore x1
    ldp x2, xSELF, [sp, #16]
    .cfi_restore x2
    .cfi_restore x18
    ldr xLR, [sp, #32]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x1, x2                      // Pass value.
    mov x2, xSELF                   // Pass Thread::Current.
    mov x3, sp                      // Pass SP.
    b artThrowArrayStoreException   // (Object*, Object*, Thread*, SP).
    brk 0                           // Unreached.
END art_quick_aput_obj

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x2, xSELF                     // pass Thread::Current
    mov x3, sp                        // pass SP
    bl \entrypoint                    // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macro to facilitate adding new array allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, xSELF                     // pass Thread::Current
    mov x4, sp                        // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macros taking advantage of code similarities for downcalls with referrer.
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x2, xSELF                     // pass Thread::Current
    mov x3, sp                        // pass SP
    bl \entrypoint                    // (uint32_t type_idx, Method* method, Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x3, xSELF                     // pass Thread::Current
    mov x4, sp                        // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    ldr w3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x4, xSELF                     // pass Thread::Current
    mov x5, sp                        // pass SP
    bl \entrypoint
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure.
     */
TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

    /*
     * Entry from managed code when static storage is uninitialized. This stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned.
     */
TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO

TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO
TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO

ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

// This is separated out as the argument order is different.
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, x1                        // Store value
    ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x2, x3                        // Put value param
    mov x3, xSELF                     // pass Thread::Current
    mov x4, sp                        // pass SP
    bl artSet64StaticFromCode
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_set64_static

    /*
     * Entry from managed code to resolve a string. This stub will allocate a String and deliver an
     * exception on error. On success the String is returned. x0 holds the referring method,
     * w1 holds the string index. The fast path check for hit in strings cache has already been
     * performed.
     */
TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO

// Generate the allocation entrypoints for each allocator.
GENERATE_ALL_ALLOC_ENTRYPOINTS

    /*
     * Called by managed code when the value in wSUSPEND has been decremented to 0.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
    ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET]  // get xSELF->state_and_flags.as_struct.flags
    mov wSUSPEND, #SUSPEND_CHECK_INTERVAL   // reset wSUSPEND to SUSPEND_CHECK_INTERVAL
    cbnz w0, .Lneed_suspend                 // check flags == 0
    ret                                     // return if flags == 0
.Lneed_suspend:
    mov x0, xSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        // save callee saves for stack crawl
    mov x1, sp
    bl artTestSuspendFromCode               // (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_test_suspend

ENTRY art_quick_implicit_suspend
    mov x0, xSELF
    SETUP_REF_ONLY_CALLEE_SAVE_FRAME        // save callee saves for stack crawl
    mov x1, sp
    bl artTestSuspendFromCode               // (Thread*, SP)
    RESTORE_REF_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_implicit_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver. The frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    str x0, [sp, #0]                        // place proxy method at bottom of frame
    mov x2, xSELF                           // pass Thread::Current
    mov x3, sp                              // pass SP
    bl artQuickProxyInvokeHandler           // (Method* proxy method, receiver, Thread*, SP)
    // Use xETR as xSELF might be scratched by native function above.
    ldr x2, [xETR, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy           // success if no exception is pending
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME  // Restore frame
    fmov d0, x0                             // Store result in d0 in case it was float or double
    ret                                     // return on success
.Lexception_in_proxy:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict. xIP1 is a hidden argument that holds the target method's
     * dex method index.
     */
ENTRY art_quick_imt_conflict_trampoline
    ldr w0, [sp, #0]                                // load caller Method*
    ldr w0, [x0, #METHOD_DEX_CACHE_METHODS_OFFSET]  // load dex_cache_resolved_methods
    add x0, x0, #OBJECT_ARRAY_DATA_OFFSET           // get starting address of data
    ldr w0, [x0, xIP1, lsl 2]                       // load the target method
    b art_quick_invoke_interface_trampoline
END art_quick_imt_conflict_trampoline

ENTRY art_quick_resolution_trampoline
    SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline  // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov xIP0, x0            // Remember returned code pointer in xIP0.
    ldr w0, [sp, #0]        // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    br xIP0
1:
    RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline
1386
1387/*
1388 * Generic JNI frame layout:
1389 *
1390 * #-------------------#
1391 * | |
1392 * | caller method... |
1393 * #-------------------# <--- SP on entry
1394 * | Return X30/LR |
1395 * | X29/FP | callee save
1396 * | X28 | callee save
1397 * | X27 | callee save
1398 * | X26 | callee save
1399 * | X25 | callee save
1400 * | X24 | callee save
1401 * | X23 | callee save
1402 * | X22 | callee save
1403 * | X21 | callee save
1404 * | X20 | callee save
Stuart Monteithb95a5342014-03-12 13:32:32 +00001405 * | X7 | arg7
1406 * | X6 | arg6
1407 * | X5 | arg5
1408 * | X4 | arg4
1409 * | X3 | arg3
1410 * | X2 | arg2
1411 * | X1 | arg1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001412 * | D7 | float arg 8
1413 * | D6 | float arg 7
1414 * | D5 | float arg 6
1415 * | D4 | float arg 5
1416 * | D3 | float arg 4
1417 * | D2 | float arg 3
1418 * | D1 | float arg 2
1419 * | D0 | float arg 1
Andreas Gampecf4035a2014-05-28 22:43:01 -07001420 * | Method* | <- X0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001421 * #-------------------#
1422 * | local ref cookie | // 4B
Mathieu Chartier421c5372014-05-14 14:11:40 -07001423 * | handle scope size | // 4B
Stuart Monteithb95a5342014-03-12 13:32:32 +00001424 * #-------------------#
1425 * | JNI Call Stack |
1426 * #-------------------# <--- SP on native call
1427 * | |
1428 * | Stack for Regs | The trampoline assembly will pop these values
1429 * | | into registers for native call
1430 * #-------------------#
1431 * | Native code ptr |
1432 * #-------------------#
1433 * | Free scratch |
1434 * #-------------------#
1435 * | Ptr to (1) | <--- SP
1436 * #-------------------#
1437 */
1438 /*
1439 * Called to do a generic JNI down-call
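     * Overall flow: set up a refs-and-args callee-save frame, call artQuickGenericJniTrampoline
     * to build the handle scope and the native call stack, pop the outgoing arguments into
     * registers, invoke the native code, then call artQuickGenericJniEndTrampoline to convert
     * the result and unwind before returning to managed code.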
1440 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001441ENTRY art_quick_generic_jni_trampoline
Stuart Monteithb95a5342014-03-12 13:32:32 +00001442 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
1443 str x0, [sp, #0] // Store native ArtMethod* to bottom of stack.
1444
1445 // Save SP, so we can have static CFI info.
1446 mov x28, sp
1447 .cfi_def_cfa_register x28
1448
1449 // This looks the same, but is different: this will be updated to point to the bottom
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001450 // of the frame when the handle scope is inserted.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001451 mov xFP, sp
1452
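    // Reserve a large scratch area (5120 bytes) below the managed frame; artQuickGenericJniTrampoline
    // fills it in (handle scope, native stack arguments, register-staging area). The size is
    // presumably a worst-case upper bound for JNI signatures.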
Zheng Xub551fdc2014-07-25 11:49:42 +08001453 mov xIP0, #5120
1454 sub sp, sp, xIP0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001455
1456 // prepare for artQuickGenericJniTrampoline call
1457 // (Thread*, SP)
1458 // x0 x1 <= C calling convention
1459 // xSELF xFP <= where they are
1460
1461 mov x0, xSELF // Thread*
1462 mov x1, xFP
1463 bl artQuickGenericJniTrampoline // (Thread*, sp)
1464
Andreas Gampec200a4a2014-06-16 18:39:09 -07001465 // The C call will have registered the complete save-frame on success.
1466 // The result of the call is:
1467 // x0: pointer to native code, 0 on error.
1468 // x1: pointer to the bottom of the used area of the alloca, can restore stack till there.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001469
Andreas Gampec200a4a2014-06-16 18:39:09 -07001470 // Check for error = 0.
1471 cbz x0, .Lentry_error
Stuart Monteithb95a5342014-03-12 13:32:32 +00001472
Andreas Gampec200a4a2014-06-16 18:39:09 -07001473 // Release part of the alloca.
1474 mov sp, x1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001475
Andreas Gampec200a4a2014-06-16 18:39:09 -07001476 // Save the code pointer
1477 mov xIP0, x0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001478
1479 // Load parameters from frame into registers.
1480 // TODO: Check with artQuickGenericJniTrampoline.
1481 // Also, check against AAPCS64 - the stack arguments are interleaved.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001482 ldp x0, x1, [sp]
1483 ldp x2, x3, [sp, #16]
1484 ldp x4, x5, [sp, #32]
1485 ldp x6, x7, [sp, #48]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001486
Andreas Gampec200a4a2014-06-16 18:39:09 -07001487 ldp d0, d1, [sp, #64]
1488 ldp d2, d3, [sp, #80]
1489 ldp d4, d5, [sp, #96]
1490 ldp d6, d7, [sp, #112]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001491
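    // Drop the 128-byte register-staging area that was just popped (8 GPRs + 8 FPRs, 8 bytes each).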
Andreas Gampec200a4a2014-06-16 18:39:09 -07001492 add sp, sp, #128
Stuart Monteithb95a5342014-03-12 13:32:32 +00001493
Zheng Xub551fdc2014-07-25 11:49:42 +08001494 blr xIP0 // native call.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001495
1496 // result sign extension is handled in C code
1497 // prepare for artQuickGenericJniEndTrampoline call
Andreas Gampec200a4a2014-06-16 18:39:09 -07001498 // (Thread*, result, result_f)
1499 // x0 x1 x2 <= C calling convention
1500 mov x1, x0 // Pass the GPR result of the native call.
Zheng Xub551fdc2014-07-25 11:49:42 +08001501 mov x0, xETR // Thread register, original xSELF might be scratched by native code.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001502 fmov x2, d0 // d0 will contain floating point result, but needs to go into x2
Stuart Monteithb95a5342014-03-12 13:32:32 +00001503
1504 bl artQuickGenericJniEndTrampoline
1505
1506 // Tear down the alloca.
1507 mov sp, x28
1508 .cfi_def_cfa_register sp
1509
Stuart Monteithb95a5342014-03-12 13:32:32 +00001510 // Pending exceptions possible.
Zheng Xub551fdc2014-07-25 11:49:42 +08001511 // Use xETR as xSELF might be scratched by native code
1512 ldr x1, [xETR, THREAD_EXCEPTION_OFFSET]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001513 cbnz x1, .Lexception_in_native
1514
1515 // Tear down the callee-save frame.
1516 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
1517
1518 // Store into FPR, for when it's an FPR return...
1519 fmov d0, x0
1520 ret
1521
1522.Lentry_error:
1523 mov sp, x28
1524 .cfi_def_cfa_register sp
Stuart Monteithb95a5342014-03-12 13:32:32 +00001525.Lexception_in_native:
1526 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME
1527 DELIVER_PENDING_EXCEPTION
1528
1529END art_quick_generic_jni_trampoline
1530
1531/*
1532 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
1533 * of a quick call:
1534 * x0 = method being called/to bridge to.
1535 * x1..x7, d0..d7 = arguments to that method.
1536 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001537ENTRY art_quick_to_interpreter_bridge
Stuart Monteithb95a5342014-03-12 13:32:32 +00001538 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments.
1539
1540 // x0 will contain mirror::ArtMethod* method.
1541 mov x1, xSELF // Pass Thread::Current(), held in xSELF.
1542 mov x2, sp
1543
1544 // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
1545 // mirror::ArtMethod** sp)
1546 bl artQuickToInterpreterBridge
1547
1548 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case.
1549
1550 fmov d0, x0
1551
1552 RETURN_OR_DELIVER_PENDING_EXCEPTION
1553END art_quick_to_interpreter_bridge
1554
Andreas Gamped58342c2014-06-05 14:18:08 -07001555
1556//
1557// Instrumentation-related stubs
1558//
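// The entry stub calls artInstrumentationMethodEntryFromCode to obtain the code to invoke and
// points LR at art_quick_instrumentation_exit, so the exit stub runs when the method returns.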
1559 .extern artInstrumentationMethodEntryFromCode
1560ENTRY art_quick_instrumentation_entry
1561 SETUP_REF_AND_ARGS_CALLEE_SAVE_FRAME
1562
Zheng Xub551fdc2014-07-25 11:49:42 +08001563 mov x20, x0 // Preserve method reference in a callee-save.
Andreas Gamped58342c2014-06-05 14:18:08 -07001564
1565 mov x2, xSELF
1566 mov x3, sp
1567 mov x4, xLR
1568 bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, SP, LR)
1569
Zheng Xub551fdc2014-07-25 11:49:42 +08001570 mov xIP0, x0 // x0 = result of call.
1571 mov x0, x20 // Reload method reference.
Andreas Gamped58342c2014-06-05 14:18:08 -07001572
1573 RESTORE_REF_AND_ARGS_CALLEE_SAVE_FRAME // Note: will restore xSELF
1574 adr xLR, art_quick_instrumentation_exit
Zheng Xub551fdc2014-07-25 11:49:42 +08001575 br xIP0 // Tail-call method with lr set to art_quick_instrumentation_exit.
Andreas Gamped58342c2014-06-05 14:18:08 -07001576END art_quick_instrumentation_entry
1577
1578 .extern artInstrumentationMethodExitFromCode
1579ENTRY art_quick_instrumentation_exit
1580 mov xLR, #0 // Clobber LR for later checks.
1581
1582 SETUP_REF_ONLY_CALLEE_SAVE_FRAME
1583
1584 // We need to save x0 and d0. We could use a callee-save from SETUP_REF_ONLY, but then
1585 // we would need to fully restore it. As there are a lot of callee-save registers, it seems
1586 // easier to have an extra small stack area.
1587
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02001588 str x0, [sp, #-16]! // Save integer result.
Andreas Gamped58342c2014-06-05 14:18:08 -07001589 .cfi_adjust_cfa_offset 16
1590 str d0, [sp, #8] // Save floating-point result.
1591
Andreas Gamped58342c2014-06-05 14:18:08 -07001592 add x1, sp, #16 // Pass SP.
1593 mov x2, x0 // Pass integer result.
1594 fmov x3, d0 // Pass floating-point result.
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02001595 mov x0, xSELF // Pass Thread.
Andreas Gamped58342c2014-06-05 14:18:08 -07001596 bl artInstrumentationMethodExitFromCode // (Thread*, SP, gpr_res, fpr_res)
1597
Zheng Xub551fdc2014-07-25 11:49:42 +08001598 mov xIP0, x0 // Return address from instrumentation call.
Andreas Gamped58342c2014-06-05 14:18:08 -07001599 mov xLR, x1 // x1 holds the link register if we're to bounce to deoptimize
1600
1601 ldr d0, [sp, #8] // Restore floating-point result.
1602 ldr x0, [sp], 16 // Restore integer result, and drop stack area.
1603 .cfi_adjust_cfa_offset -16
1604
1605 POP_REF_ONLY_CALLEE_SAVE_FRAME
1606
Zheng Xub551fdc2014-07-25 11:49:42 +08001607 br xIP0 // Tail-call out.
Andreas Gamped58342c2014-06-05 14:18:08 -07001608END art_quick_instrumentation_exit
1609
1610 /*
1611 * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1612 * will long jump to the upcall with a special exception of -1.
1613 */
1614 .extern artDeoptimize
1615ENTRY art_quick_deoptimize
1616 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1617 mov x0, xSELF // Pass thread.
1618 mov x1, sp // Pass SP.
1619 bl artDeoptimize // artDeoptimize(Thread*, SP)
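    // artDeoptimize never returns; the brk below traps if it somehow does.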
Serban Constantinescu86797a72014-06-19 16:17:56 +01001620 brk 0
Andreas Gamped58342c2014-06-05 14:18:08 -07001621END art_quick_deoptimize
1622
1623
Serban Constantinescu169489b2014-06-11 16:43:35 +01001624 /*
1625 * String's indexOf.
1626 *
1627 * TODO: Not very optimized.
1628 * On entry:
1629 * x0: string object (known non-null)
1630 * w1: char to match (known <= 0xFFFF)
1631 * w2: Starting offset in string data
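     * Returns the index of the first occurrence at or after the starting offset, or -1 if the
     * char is not found.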
1632 */
1633ENTRY art_quick_indexof
1634 ldr w3, [x0, #STRING_COUNT_OFFSET]
1635 ldr w4, [x0, #STRING_OFFSET_OFFSET]
1636 ldr w0, [x0, #STRING_VALUE_OFFSET] // w0 := string value (char array reference)
1637
1638 /* Clamp start to [0..count] */
1639 cmp w2, #0
1640 csel w2, wzr, w2, lt
1641 cmp w2, w3
1642 csel w2, w3, w2, gt
1643
1644 /* Build a pointer to the start of the string data */
1645 add x0, x0, #STRING_DATA_OFFSET
1646 add x0, x0, x4, lsl #1
1647
1648 /* Save a copy to compute result */
1649 mov x5, x0
1650
1651 /* Build pointer to start of data to compare and pre-bias */
1652 add x0, x0, x2, lsl #1
1653 sub x0, x0, #2
1654
1655 /* Compute iteration count */
1656 sub w2, w3, w2
1657
1658 /*
1659 * At this point we have:
1660 * x0: start of the data to test
1661 * w1: char to compare
1662 * w2: iteration count
1663 * x5: original start of string data
1664 */
1665
1666 subs w2, w2, #4
1667 b.lt .Lindexof_remainder
1668
1669.Lindexof_loop4:
1670 ldrh w6, [x0, #2]!
1671 ldrh w7, [x0, #2]!
Zheng Xub551fdc2014-07-25 11:49:42 +08001672 ldrh wIP0, [x0, #2]!
1673 ldrh wIP1, [x0, #2]!
Serban Constantinescu169489b2014-06-11 16:43:35 +01001674 cmp w6, w1
1675 b.eq .Lmatch_0
1676 cmp w7, w1
1677 b.eq .Lmatch_1
Zheng Xub551fdc2014-07-25 11:49:42 +08001678 cmp wIP0, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01001679 b.eq .Lmatch_2
Zheng Xub551fdc2014-07-25 11:49:42 +08001680 cmp wIP1, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01001681 b.eq .Lmatch_3
1682 subs w2, w2, #4
1683 b.ge .Lindexof_loop4
1684
1685.Lindexof_remainder:
1686 adds w2, w2, #4
1687 b.eq .Lindexof_nomatch
1688
1689.Lindexof_loop1:
1690 ldrh w6, [x0, #2]!
1691 cmp w6, w1
1692 b.eq .Lmatch_3
1693 subs w2, w2, #1
1694 b.ne .Lindexof_loop1
1695
1696.Lindexof_nomatch:
1697 mov x0, #-1
1698 ret
1699
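    /*
     * x0 points at the most recently loaded char (pre-index addressing). Each match label backs
     * up to the matching char's address, subtracts the data start (x5), and shifts right by one
     * to convert the byte offset into a char index.
     */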
1700.Lmatch_0:
1701 sub x0, x0, #6
1702 sub x0, x0, x5
1703 asr x0, x0, #1
1704 ret
1705.Lmatch_1:
1706 sub x0, x0, #4
1707 sub x0, x0, x5
1708 asr x0, x0, #1
1709 ret
1710.Lmatch_2:
1711 sub x0, x0, #2
1712 sub x0, x0, x5
1713 asr x0, x0, #1
1714 ret
1715.Lmatch_3:
1716 sub x0, x0, x5
1717 asr x0, x0, #1
1718 ret
1719END art_quick_indexof
Andreas Gampe266340d2014-05-02 07:55:24 -07001720
1721 /*
1722 * String's compareTo.
1723 *
1724 * TODO: Not very optimized.
1725 *
1726 * On entry:
1727 * x0: this object pointer
1728 * x1: comp object pointer
1729 *
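     * Returns the difference of the first pair of chars that differ, or, when one string is a
     * prefix of the other, the difference of the lengths.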
1730 */
Serban Constantinescu86797a72014-06-19 16:17:56 +01001731 .extern __memcmp16
Andreas Gampe266340d2014-05-02 07:55:24 -07001732ENTRY art_quick_string_compareto
1733 mov x2, x0 // x0 is return, use x2 for first input.
1734 sub x0, x2, x1 // Same string object?
1735 cbnz x0, 1f
1736 ret
17371: // Different string objects.
1738
1739 ldr w6, [x2, #STRING_OFFSET_OFFSET]
1740 ldr w5, [x1, #STRING_OFFSET_OFFSET]
1741 ldr w4, [x2, #STRING_COUNT_OFFSET]
1742 ldr w3, [x1, #STRING_COUNT_OFFSET]
1743 ldr w2, [x2, #STRING_VALUE_OFFSET]
1744 ldr w1, [x1, #STRING_VALUE_OFFSET]
1745
1746 /*
1747 * Now: CharArray* Offset Count
1748 * first arg x2 w6 w4
1749 * second arg x1 w5 w3
1750 */
1751
1752 // x0 := str1.length(w4) - str2.length(w3). ldr zero-extended w3/w4 into x3/x4.
1753 subs x0, x4, x3
1754 // Min(count1, count2) into w3.
1755 csel x3, x3, x4, ge
1756
1757 // Build pointer into string data.
1758
1759 // Add offset in array (substr etc.) (sign extend and << 1).
1760 add x2, x2, w6, sxtw #1
1761 add x1, x1, w5, sxtw #1
1762
1763 // Add offset in CharArray to array.
1764 add x2, x2, #STRING_DATA_OFFSET
1765 add x1, x1, #STRING_DATA_OFFSET
1766
Serban Constantinescu169489b2014-06-11 16:43:35 +01001767 // TODO: Tune this value.
Andreas Gampe266340d2014-05-02 07:55:24 -07001768 // Check for long string, do memcmp16 for them.
1769 cmp w3, #28 // Constant from arm32.
1770 bgt .Ldo_memcmp16
1771
1772 /*
1773 * Now:
1774 * x2: *first string data
1775 * x1: *second string data
1776 * w3: iteration count
1777 * x0: return value if comparison equal
1778 * x4, x5, x6, x7: free
1779 */
1780
1781 // Do a simple unrolled loop.
1782.Lloop:
1783 // At least two more elements?
1784 subs w3, w3, #2
1785 b.lt .Lremainder_or_done
1786
1787 ldrh w4, [x2], #2
1788 ldrh w5, [x1], #2
1789
1790 ldrh w6, [x2], #2
1791 ldrh w7, [x1], #2
1792
1793 subs w4, w4, w5
1794 b.ne .Lw4_result
1795
1796 subs w6, w6, w7
1797 b.ne .Lw6_result
1798
1799 b .Lloop
1800
1801.Lremainder_or_done:
1802 adds w3, w3, #1
1803 b.eq .Lremainder
1804 ret
1805
1806.Lremainder:
1807 ldrh w4, [x2], #2
1808 ldrh w5, [x1], #2
1809 subs w4, w4, w5
1810 b.ne .Lw4_result
1811 ret
1812
1813// Result is in w4
1814.Lw4_result:
1815 sxtw x0, w4
1816 ret
1817
1818// Result is in w6
1819.Lw6_result:
1820 sxtw x0, w6
1821 ret
1822
1823.Ldo_memcmp16:
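    // __memcmp16(ptr1, ptr2, count) compares count uint16_t elements; it is assumed to return 0
    // on equality and the difference of the first differing pair otherwise, and (per the comment
    // below) it does not touch x14/x15.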
Zheng Xu62ddb322014-08-12 17:19:12 +08001824 mov x14, x0 // Save x0 and LR. __memcmp16 does not use these temps.
1825 mov x15, xLR // TODO: Codify and check that?
Andreas Gampe266340d2014-05-02 07:55:24 -07001826
1827 mov x0, x2
1828 uxtw x2, w3
Serban Constantinescu86797a72014-06-19 16:17:56 +01001829 bl __memcmp16
Andreas Gampe266340d2014-05-02 07:55:24 -07001830
Zheng Xu62ddb322014-08-12 17:19:12 +08001831 mov xLR, x15 // Restore LR.
Andreas Gampe266340d2014-05-02 07:55:24 -07001832
Serban Constantinescu86797a72014-06-19 16:17:56 +01001833 cmp x0, #0 // Check the memcmp difference.
Zheng Xu62ddb322014-08-12 17:19:12 +08001834 csel x0, x0, x14, ne // x0 := (x0 != 0) ? x0 (__memcmp16 result) : x14 (saved length difference).
Andreas Gampe266340d2014-05-02 07:55:24 -07001835 ret
1836END art_quick_string_compareto
Zheng Xu0210d112014-06-17 12:25:48 +08001837
1838// Macro to facilitate adding new entrypoints which call to native function directly.
1839// Currently, xSELF is the only thing we need to take care of between managed code and AAPCS.
1840// But we might introduce more differences.
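// xSELF (x18) is spilled around the call because AAPCS64 treats x18 as a platform register that
// native code may clobber; LR must be saved because the bl overwrites it.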
1841.macro NATIVE_DOWNCALL name, entrypoint
1842 .extern \entrypoint
1843ENTRY \name
Serban Constantinescu86797a72014-06-19 16:17:56 +01001844 stp xSELF, xLR, [sp, #-16]!
Zheng Xu0210d112014-06-17 12:25:48 +08001845 bl \entrypoint
Serban Constantinescu86797a72014-06-19 16:17:56 +01001846 ldp xSELF, xLR, [sp], #16
Zheng Xu0210d112014-06-17 12:25:48 +08001847 ret
1848END \name
1849.endm
1850
1851NATIVE_DOWNCALL art_quick_fmod fmod
1852NATIVE_DOWNCALL art_quick_fmodf fmodf
1853NATIVE_DOWNCALL art_quick_memcpy memcpy
Serban Constantinescu86797a72014-06-19 16:17:56 +01001854NATIVE_DOWNCALL art_quick_assignable_from_code artIsAssignableFromCode