/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};
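
// For illustration only: the net effect of the slow path above, as pseudo-assembly.
// Register assignments are a sketch, not the actual allocation:
//
//   entry:
//     <spill live caller-save registers>
//     <shuffle arguments into x0..x7 / d0..d7 per the Dex calling convention>
//     blr  <ArtMethod entrypoint>          // re-enter the managed implementation
//     mov  <out>, w0/x0 (or fmov from s0/d0)
//     <restore live registers>
//     b    exit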

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}
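
// Worked example of the short path (a sketch, not emitted code): for the Java call
// Short.reverseBytes((short) 0xFF00), the input register holds 0xFFFFFF00 (sign-extended).
//   Rev16  ->  0xFFFF00FF   // bytes swapped within each 16-bit halfword
//   Sxth   ->  0x000000FF   // re-sign-extend from the low halfword
// which matches the expected result (short) 0x00FF. Int/long need no fix-up because Rev
// already reverses across the full register width.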

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}
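
// The Cmp/Cneg pair computes the absolute value without a branch. A rough C equivalent
// of the emitted logic (illustration only; like Java's Math.abs, it leaves
// Integer.MIN_VALUE unchanged, since negating it wraps back to itself in hardware):
//
//   int32_t abs_sketch(int32_t in) {
//     return (in < 0) ? -in : in;  // Cmp sets flags; Cneg negates only when 'lt'.
//   }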

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}
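
// A single Fmin/Fmax suffices here because, to the best of our knowledge, the AArch64
// instructions already match the Java corner cases, so no fix-up code is needed:
//   Fmin(NaN, x)     -> NaN     just as Math.min(Double.NaN, x) == NaN
//   Fmin(-0.0, +0.0) -> -0.0    just as Math.min(-0.0, +0.0)    == -0.0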

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  // Frintp rounds towards +infinity, as Math.ceil requires.
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  // Frintm rounds towards -infinity, as Math.floor requires.
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  // Frintn rounds to nearest with ties to even, matching Math.rint.
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}
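
// Math.round(x) is defined as floor(x + 0.5), which the Fadd/Fcvtms pair implements
// directly: Fcvtms converts to integer rounding towards -infinity. Worked examples
// (illustration only):
//   round( 2.6):  Fadd ->  3.1,  Fcvtms ->  3
//   round(-3.5):  Fadd -> -3.0,  Fcvtms -> -3   (Java rounds half-way cases up)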

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}
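
// Thread.currentThread() therefore costs a single load: the managed java.lang.Thread
// peer is cached in the runtime Thread object, which is reachable through the reserved
// thread register. Roughly (a sketch of the emitted instruction, not exact syntax):
//   ldr w_out, [tr, #Thread::PeerOffset<8>()]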

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }
}
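
// For reference, the two volatile-load lowerings above, sketched (exact encodings come
// from the code generator; load-acquire only takes a base register, so the address is
// formed in a temp first):
//   PreferAcquireRelease:  add  tmp, base, offset; ldar trg, [tmp]
//   otherwise:             ldr  trg, [base, offset]
//                          dmb  ishld                 // read barrier after the load
// The acquire form is typically cheaper than a full barrier on ARMv8 cores.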

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  if (is_volatile || is_ordered) {
    if (use_acquire_release) {
      codegen->StoreRelease(type, value, mem_op);
    } else {
      __ Dmb(InnerShareable, BarrierAll);
      codegen->Store(type, value, mem_op);
      if (is_volatile) {
        __ Dmb(InnerShareable, BarrierReads);
      }
    }
  } else {
    codegen->Store(type, value, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
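
// For reference, the volatile/ordered-store lowerings above, sketched (store-release
// likewise needs the address in a single base register):
//   PreferAcquireRelease:  add tmp, base, offset; stlr value, [tmp]
//   otherwise:             dmb ish                   // full barrier before the store
//                          str value, [base, offset]
//                          dmb ishld                 // volatile only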

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());               // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));            // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));          // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);   // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);      // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                          // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);          // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);
}
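
// The exclusive-load/exclusive-store pair is a load-linked/store-conditional loop: the
// store succeeds only if no other agent wrote the monitored location in between, and
// Cbnz retries on failure. A rough C++ equivalent of the whole sequence (sketch only):
//
//   bool cas_sketch(std::atomic<int32_t>* addr, int32_t expected, int32_t value) {
//     return addr->compare_exchange_strong(expected, value);  // 'out' = eq after loop.
//   }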

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention, so it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for the slow-path code-point compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention, so it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for the slow-path code-point compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm64
}  // namespace art