/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;


namespace {

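// Builds a MemOperand from a raw 64-bit address held in `location`. The Memory peek/poke
// intrinsics below pass native pointers as longs, so the value is used as an absolute
// address rather than as an object-plus-offset pair.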
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow path for falling back to a managed-code call when an intrinsified invoke cannot be
// handled inline. This copies the arguments into the positions required for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before the slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The invoke instruction this slow path belongs to.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

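// Fmov between a core register and an FP register copies the raw bit pattern, which is exactly
// the semantics of Double.doubleToRawLongBits/longBitsToDouble and their float counterparts.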
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

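// Byte reversal: Rev reverses all bytes of a W/X register (Integer/Long.reverseBytes). For
// shorts, Rev16 reverses the bytes within each halfword, and the result is then sign-extended
// with Sxth, since Short.reverseBytes returns a (sign-extended) short.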
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

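// Rbit reverses the bit order of the whole register, implementing Integer.reverse and
// Long.reverse in a single instruction.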
static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

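// Branch-free absolute value: compare against zero, then conditionally negate (Cneg) when the
// input is negative. As with Math.abs, the minimum (most negative) value maps to itself
// because the negation wraps.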
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

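// Fmin/Fmax propagate NaN operands and order -0.0 below +0.0, which matches the documented
// semantics of Math.min/Math.max for floats and doubles.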
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

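// Integer min/max without a branch: compare once, then Csel picks the first operand when the
// condition (lt for min, gt for max) holds and the second operand otherwise.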
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

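// Math.round is defined as floor(x + 0.5): add 0.5, then Fcvtms converts to an integer
// rounding toward minus infinity.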
static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

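// Thread.currentThread() is a single load of the managed thread peer (the java.lang.Thread
// object) from the Thread structure addressed by the fixed thread register (tr).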
void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

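// Unsafe.get*: on cores that prefer acquire/release, a volatile load is emitted as a
// load-acquire; otherwise a plain load is followed by a read barrier (Dmb). Reference loads
// may additionally need to be unpoisoned.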
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register trg = RegisterFrom(locations->Out(), type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    DCHECK(trg.IsW());
    codegen->GetAssembler()->MaybeUnpoisonHeapReference(trg);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

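// Unsafe.put*: ordered and volatile stores are emitted as store-release when the core prefers
// acquire/release; otherwise explicit Dmb fences are placed around a plain store. Reference
// stores also dirty the GC card table.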
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

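// Compare-and-swap via an exclusive load/store loop: Ldaxr/Stlxr (or Ldxr/Stxr plus explicit
// fences) retry until the store-exclusive succeeds or the loaded value differs from the
// expected one.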
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    codegen->GetAssembler()->PoisonHeapReference(value);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value);
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so unlike Quick we do
  //       not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art