/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "a64/disasm-a64.h"
#include "a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->
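// Within this region, "__" emits through the slow path's code generator; it is #undef'd after
// IntrinsicSlowPathARM64 below and redefined to emit through a plain "masm" parameter for the
// stateless helper functions that follow.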

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type)) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM64* codegen) {
  if (invoke->InputCount() == 0) {
    return;
  }

  LocationSummary* locations = invoke->GetLocations();
  InvokeDexCallingConventionVisitor calling_convention_visitor;

  // We're moving potentially two or more locations to locations that could overlap, so we need
  // a parallel move resolver.
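  // For instance, if input 0 must move from w2 to w1 while input 1 moves from w1 to w2,
  // emitting the two moves in sequence would clobber one of the values; the resolver detects
  // such swaps and cycles and breaks them with a scratch register.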
  HParallelMove parallel_move(arena);

  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    Location cc_loc = calling_convention_visitor.GetNextLocation(input->GetType());
    Location actual_loc = locations->InAt(i);

    parallel_move.AddMove(actual_loc, cc_loc, nullptr);
  }

  codegen->GetMoveResolver()->EmitNativeCode(&parallel_move);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen->GetGraph()->GetArena(), codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(), kArtMethodRegister);
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

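// An intrinsic is recognized in two phases: the locations builder's Visit... method creates a
// LocationSummary marked kIntrinsified, and only then does the code generator's matching
// Visit... method emit the actual instructions.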
bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

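// Fmov between a core register and an FP register copies the raw bit pattern unchanged, which
// is exactly the contract of Double.doubleToRawLongBits/longBitsToDouble and their float
// counterparts implemented below.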
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

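// Rev reverses the byte order of an entire register, implementing Integer/Long.reverseBytes in
// one instruction. For Short.reverseBytes only the low 16 bits matter, so Rev16 swaps the two
// low bytes and Sxth then restores the sign extension expected of the result.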
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

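// Rbit reverses the bit order of the whole register, so Integer.reverse and Long.reverse each
// compile down to a single instruction.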
static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

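// Integer abs via compare-and-conditionally-negate: Cneg writes -in_reg when the condition (lt,
// i.e. the input is negative) holds and in_reg otherwise. As with Math.abs, the minimum value
// maps to itself, since its negation overflows back to the same value.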
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

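// Fmin/Fmax return NaN when either input is NaN and treat -0.0 as less than +0.0, which lines
// up with the documented semantics of Math.min/Math.max on floating-point values; that is what
// makes the single-instruction implementation below valid.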
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

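// Integer min/max as compare-and-select: Csel keeps op1 when the condition holds (lt for min,
// gt for max) and op2 otherwise; for equal inputs either choice yields the same value.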
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

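// Math.sqrt, ceil, floor, and rint map one-to-one onto Fsqrt and the Frint family: Frintp
// rounds toward plus infinity, Frintm toward minus infinity, and Frintn to nearest with ties
// to even, which is exactly the rounding Math.rint requires.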
void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

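// Math.round(x) is specified as floor(x + 0.5), so add 0.5 and convert with Fcvtms, which
// rounds toward minus infinity (i.e. floor) while converting to a signed integer.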
static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

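// Unsafe.get*: on cores that prefer acquire/release semantics a volatile read becomes a single
// load-acquire; otherwise it is a plain load followed by a dmb read barrier.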
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

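// Unsafe.put*: both volatile and "ordered" (lazySet) stores become a store-release when the
// core prefers acquire/release; the fallback issues a full dmb before the store, and volatile
// stores add a trailing barrier as well.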
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        __ Dmb(InnerShareable, BarrierReads);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    codegen->MarkGCCard(base, value);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

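// compareAndSwap via a load-exclusive/store-conditional retry loop. With acquire/release
// support, Ldaxr/Stlxr give the loop its memory ordering directly; otherwise plain Ldxr/Stxr
// are bracketed by dmb barriers. The store-conditional writes 0 to its status register on
// success and 1 if the exclusive monitor was lost, hence the Cbnz back to the loop head.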
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());               // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));            // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));          // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);   // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);      // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    codegen->MarkGCCard(base, value);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                          // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);          // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go into the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

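// String.charAt: load the count field and bounds-check the index against it (the unsigned hs
// branch also rejects negative indices, since they compare as large unsigned values), then
// load the half-word at value[offset + index], deferring to the slow path on failure.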
void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();
  // Starting offset within data array.
  const MemberOffset offset_offset = mirror::String::OffsetOffset();
  // Start of char data within array_.
  const MemberOffset data_offset = mirror::Array::DataOffset(sizeof(uint16_t));

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));        // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  // Index computation.
  __ Ldr(temp, HeapOperand(obj, offset_offset));       // temp := str.offset.
  __ Ldr(array_temp, HeapOperand(obj, value_offset));  // array_temp := str.value.
  __ Add(temp, temp, idx);
  DCHECK_EQ(data_offset.Int32Value() % 2, 0);          // We'll compensate by shifting.
  __ Add(temp, temp, Operand(data_offset.Int32Value() / 2));

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), temp, UXTW, 1));  // out := array_temp[temp].

  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

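// The macro below generates empty visitor bodies, so these invokes are never marked
// kIntrinsified and simply take the regular (non-intrinsic) call path.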
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(StringCompareTo)
UNIMPLEMENTED_INTRINSIC(StringIsEmpty)  // Might not want to do these two anyway; inlining should
UNIMPLEMENTED_INTRINSIC(StringLength)   // be good enough here.
UNIMPLEMENTED_INTRINSIC(StringIndexOf)
UNIMPLEMENTED_INTRINSIC(StringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)

}  // namespace arm64
}  // namespace art