/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path.  Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual).  So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}
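
// Note on the two helpers above: an Fmov between a core register and an FP
// register copies the raw bit pattern with no numeric conversion, which is
// exactly what Double.doubleToRawLongBits/longBitsToDouble (and the float
// variants) require. For example, Double.doubleToRawLongBits(1.0) yields
// 0x3ff0000000000000L, the IEEE 754 encoding of 1.0.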

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}
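
// For the short case above, Rev16 byte-swaps each 16-bit halfword of the W
// register and Sxth then sign-extends the low halfword, giving Java's
// Short.reverseBytes semantics: e.g. 0x1234 becomes 0x3412, and
// (short)0xff00 (-256) becomes 0x00ff (255).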

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}
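
// A64 has no count-trailing-zeros instruction, so the helper above uses the
// standard Rbit+Clz idiom: reversing the bit order turns trailing zeros into
// leading zeros. For example, with input 8 (0x00000008), Rbit produces
// 0x10000000 and Clz then returns 3 == Integer.numberOfTrailingZeros(8).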

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}
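
// The branchless abs above conditionally negates the input when it compares
// below zero. As with Java's Math.abs, the most negative value wraps:
// negating 0x80000000 in two's complement yields 0x80000000 again, so
// Math.abs(Integer.MIN_VALUE) == Integer.MIN_VALUE.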

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}
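
// Fmin/Fmax can be used directly here because their A64 semantics line up
// with Java's Math.min/max on floats and doubles: a NaN operand propagates to
// the result, and -0.0 orders below +0.0 (so Math.min(0.0, -0.0) == -0.0).
// A plain compare-and-select sequence would need extra code to get those
// corner cases right.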

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}
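
// Integer min/max is likewise branchless: Cmp sets the flags once and Csel
// picks op1 when the signed condition holds, op2 otherwise. For min, roughly:
//
//   cmp  w1, w2
//   csel w0, w1, w2, lt  // w0 = (w1 < w2) ? w1 : w2, i.e. Math.min(w1, w2)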

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}
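
// The sequence above computes floor(x + 0.5): Fadd applies the 0.5 bias and
// Fcvtms converts to a signed integer rounding toward minus infinity. That is
// Java's definition of Math.round (e.g. round(2.5) == 3, round(-2.5) == -2),
// which is why the locations builders below only intrinsify when
// kRoundIsPlusPointFive holds.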

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateArrayLoadWithBakerReadBarrier(
        invoke, trg_loc, base, 0U, offset_loc, temp, /* needs_null_check */ false);
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
    if (is_volatile) {
      codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
    }
  }
}
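
// For the volatile variants, LoadAcquire emits a load-acquire of the
// appropriate width (the ldar family); note that no separate barrier
// instruction follows, the acquire ordering of the load itself being relied
// on for Java volatile read semantics on ARMv8.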

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      codegen->StoreRelease(type, source, mem_op);
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}
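
// Note that GenUnsafePut makes no distinction between the volatile and the
// ordered ("lazySet") case: both take the StoreRelease path, a store-release
// (stlr-style) write being strong enough for either. Reference stores also
// mark the GC card afterwards so the collector sees the new pointer.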

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                   // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);   // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  __ Bind(&loop_head);
  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
  __ Cmp(tmp_value, expected);
  __ B(&exit_loop, ne);
  __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
  __ Cbnz(tmp_32, &loop_head);
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }
  }
}
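
// The loop above is the classic ARMv8 load-linked/store-conditional CAS:
// Ldaxr performs an exclusive load-acquire, the Cmp/B.ne pair bails out if
// the current value differs from `expected`, and Stlxr attempts an exclusive
// store-release, writing 0 to tmp_32 on success and 1 if the exclusive
// monitor was lost (in which case Cbnz retries). Cset(out, eq) finally
// materializes the boolean result from the last comparison.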
1058
1059void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001060 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001061}
1062void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001063 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001064}
1065void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain391b8662015-12-18 11:43:38 +00001066 // The UnsafeCASObject intrinsic is missing a read barrier, and
1067 // therefore sometimes does not work as expected (b/25883050).
1068 // Turn it off temporarily as a quick fix, until the read barrier is
1069 // implemented (see TODO in GenCAS below).
1070 //
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001071 // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
1072 if (kEmitCompilerReadBarrier) {
Roland Levillain985ff702015-10-23 13:25:35 +01001073 return;
1074 }
1075
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001076 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001077}
1078
1079void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
1080 GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
1081}
1082void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
1083 GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
1084}
1085void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
1086 GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
1087}
1088
1089void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001090 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1091 LocationSummary::kCallOnSlowPath,
1092 kIntrinsified);
1093 locations->SetInAt(0, Location::RequiresRegister());
1094 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray82f34492015-02-04 10:44:23 +00001095 // In case we need to go in the slow path, we can't have the output be the same
1096 // as the input: the current liveness analysis considers the input to be live
1097 // at the point of the call.
1098 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001099}
1100
1101void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
1102 vixl::MacroAssembler* masm = GetVIXLAssembler();
1103 LocationSummary* locations = invoke->GetLocations();
1104
1105 // Location of reference to data array
1106 const MemberOffset value_offset = mirror::String::ValueOffset();
1107 // Location of count
1108 const MemberOffset count_offset = mirror::String::CountOffset();
Andreas Gampe878d58c2015-01-15 23:24:00 -08001109
1110 Register obj = WRegisterFrom(locations->InAt(0)); // String object pointer.
1111 Register idx = WRegisterFrom(locations->InAt(1)); // Index of character.
1112 Register out = WRegisterFrom(locations->Out()); // Result character.
1113
1114 UseScratchRegisterScope temps(masm);
1115 Register temp = temps.AcquireW();
1116 Register array_temp = temps.AcquireW(); // We can trade this for worse scheduling.
1117
1118 // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
1119 // the cost.
1120 // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
1121 // we will not optimize the code for constants (which would save a register).
1122
1123 SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
1124 codegen_->AddSlowPath(slow_path);
1125
1126 __ Ldr(temp, HeapOperand(obj, count_offset)); // temp = str.length.
1127 codegen_->MaybeRecordImplicitNullCheck(invoke);
1128 __ Cmp(idx, temp);
1129 __ B(hs, slow_path->GetEntryLabel());
1130
Jeff Hao848f70a2014-01-15 13:49:50 -08001131 __ Add(array_temp, obj, Operand(value_offset.Int32Value())); // array_temp := str.value.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001132
1133 // Load the value.
Jeff Hao848f70a2014-01-15 13:49:50 -08001134 __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1)); // out := array_temp[idx].
Andreas Gampe878d58c2015-01-15 23:24:00 -08001135
1136 __ Bind(slow_path->GetExitLabel());
1137}
1138
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001139void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001140 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1141 LocationSummary::kCall,
1142 kIntrinsified);
1143 InvokeRuntimeCallingConvention calling_convention;
1144 locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
1145 locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
1146 locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
1147}
1148
1149void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
1150 vixl::MacroAssembler* masm = GetVIXLAssembler();
1151 LocationSummary* locations = invoke->GetLocations();
1152
Nicolas Geoffray512e04d2015-03-27 17:21:24 +00001153 // Note that the null check must have been done earlier.
Calin Juravle641547a2015-04-21 22:08:51 +01001154 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001155
1156 Register argument = WRegisterFrom(locations->InAt(1));
1157 __ Cmp(argument, 0);
1158 SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
1159 codegen_->AddSlowPath(slow_path);
1160 __ B(eq, slow_path->GetEntryLabel());
1161
1162 __ Ldr(
1163 lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
1164 __ Blr(lr);
1165 __ Bind(slow_path->GetExitLabel());
1166}
1167
Agi Csakiea34b402015-08-13 17:51:19 -07001168void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
1169 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1170 LocationSummary::kNoCall,
1171 kIntrinsified);
1172 locations->SetInAt(0, Location::RequiresRegister());
1173 locations->SetInAt(1, Location::RequiresRegister());
1174 // Temporary registers to store lengths of strings and for calculations.
1175 locations->AddTemp(Location::RequiresRegister());
1176 locations->AddTemp(Location::RequiresRegister());
1177
1178 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1179}
1180
1181void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
1182 vixl::MacroAssembler* masm = GetVIXLAssembler();
1183 LocationSummary* locations = invoke->GetLocations();
1184
1185 Register str = WRegisterFrom(locations->InAt(0));
1186 Register arg = WRegisterFrom(locations->InAt(1));
1187 Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if the input is null; return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check: return true if it is the same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // The receiver must be a string object, so its class field equals all strings' class fields.
  // If the argument is a string object, its class field must equal the receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load the lengths of this and the argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if the lengths are equal; return false if they are not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store the offset of the string value in preparation for the comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

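  // Widen the temps to X registers so the loop below can load and compare
  // 8 bytes (four UTF-16 characters) per iteration.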
  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time, starting at the beginning of the string.
  // This is OK to do because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
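  // Subtract 4 from the remaining character count, setting the flags, and stay
  // in the loop while characters remain. Comparing the zero padding in the last
  // word is harmless since both strings are zero-padded.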
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If the loop did not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when the value is
  // not known statically, or a direct dispatch when we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow path. We could dispatch to it directly, but this case should be
      // rare, so for simplicity just emit the full slow path and branch to it unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start index = 0.
    __ Mov(tmp_reg, 0);
  }

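  // Code points <= 0xFFFF are handled by the pIndexOf stub; the string, the code
  // point, and the start index are already in the runtime calling convention
  // registers set up by the location builders below.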
  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention, so it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for the slow-path code point comparison, and to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention, so it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for the slow-path code point comparison.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

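  // A null byte array cannot be handled here; send it to the slow path, which
  // falls back to the regular call.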
  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

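  // The arguments are already in the runtime calling convention registers, so
  // the entrypoint can be invoked directly.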
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  // The inputs are passed in the runtime calling convention registers; no temps are required.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

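// Generates a call to a runtime math entrypoint. The arguments and the result
// stay in the FP argument/return registers selected by the location builders
// above, so no extra moves are needed around the call.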
static void GenFPToFPCall(HInvoke* invoke,
                          vixl::MacroAssembler* masm,
                          CodeGeneratorARM64* codegen,
                          QuickEntrypointEnum entry) {
  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64WordSize>(entry).Int32Value()));
  __ Blr(lr);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickNextAfter);
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

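// An empty locations builder leaves the invoke without an intrinsic
// LocationSummary, so these calls fall back to the regular invoke path.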
UNIMPLEMENTED_INTRINSIC(IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(LongBitCount)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)

UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)

// Handled as HIR instructions.
UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(IntegerCompare)
UNIMPLEMENTED_INTRINSIC(LongCompare)
UNIMPLEMENTED_INTRINSIC(IntegerSignum)
UNIMPLEMENTED_INTRINSIC(LongSignum)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art