/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->
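
// Moves the ABI return value of a slow-path call into the intrinsic's output location, using
// the core or FP register file depending on the return type. kDiscardForSameWReg lets the
// macro assembler elide the move when source and destination are the same W register.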
static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}
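
// Fmov between a core and an FP register is a raw bit copy, which is exactly the contract of
// Double.doubleToRawLongBits/longBitsToDouble and Float.floatToRawIntBits/intBitsToFloat:
// no rounding and no NaN canonicalization.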
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
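      // Rev16 byte-swaps each 16-bit halfword; the Sxth then sign-extends the low half, since
      // Short.reverseBytes returns a (signed) short.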
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);
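
  // out = (in < 0) ? -in : in. Cneg negates when the condition (lt) holds and otherwise just
  // copies, so no branch is needed.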
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
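  // A single Fmin/Fmax matches Java's Math.min/max for floats and doubles: a NaN input
  // propagates to the result, and -0.0 compares as smaller than +0.0.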
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);
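
  // Branch-free min/max: compare once, then Csel selects op1 on lt (min) or gt (max) and op2
  // otherwise.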
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);
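
  // Math.round(x) is defined as floor(x + 0.5): add 0.5, then convert to a signed integer
  // rounding towards minus infinity (Fcvtms).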
  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
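  // Volatile reads need acquire semantics: either a load-acquire when the core prefers
  // acquire/release, or a plain load followed by a read barrier.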
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
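
  // Ordered and volatile stores need release semantics: a store-release instruction when the
  // core prefers acquire/release, otherwise a full barrier before the store. Volatile stores
  // additionally get a trailing barrier.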
  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        __ Dmb(InnerShareable, BarrierReads);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
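  // Ldaxr/Stlxr (or Ldxr/Stxr bracketed by explicit barriers) form an exclusive load/store
  // pair; Cbnz retries the loop when the store-exclusive fails, e.g. because another thread
  // touched the monitored location.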
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go into the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so, unlike Quick, we
  //       do not optimize the code for constant indices (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));      // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
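  // Unsigned bounds check: a negative index wraps around to a large unsigned value, so the
  // single 'hs' branch also routes negative indices to the slow path.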
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));       // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
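  // The receiver has already been null-checked (see the DCHECK above), but the argument string
  // may still be null; a null argument branches to the slow path, where the managed compareTo
  // throws the expected NullPointerException.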
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.
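
// Defining empty visitors here means these invokes are never marked kIntrinsified, so
// TryDispatch() returns false and the calls fall back to the regular invoke path.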
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm64
}  // namespace art