/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}
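
// A short illustration of what the sequences above compute (sketch only;
// helper names are hypothetical, not part of this file). REV16 swaps the two
// bytes of the low half-word and SXTH sign-extends the result; REV reverses
// all bytes of the 32- or 64-bit register:
//
//   #include <cstdint>
//
//   int16_t ReverseBytesShort(int16_t x) {                // Rev16 + Sxth
//     uint16_t u = static_cast<uint16_t>(x);
//     u = static_cast<uint16_t>((u << 8) | (u >> 8));
//     return static_cast<int16_t>(u);                     // sign-extend
//   }
//
//   uint32_t ReverseBytesInt(uint32_t x) {                // Rev (W register)
//     return (x << 24) | ((x & 0xFF00u) << 8) | ((x >> 8) & 0xFF00u) | (x >> 24);
//   }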

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}
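
// The Rbit+Clz pair above relies on the identity CTZ(x) == CLZ(bit_reverse(x)):
// reversing the bits moves the least significant set bit to the most
// significant position, where CLZ can count it. Illustrative sketch
// (hypothetical helper; __builtin_clz is the GCC/Clang builtin):
//
//   #include <cstdint>
//
//   int NumberOfTrailingZeros32(uint32_t x) {
//     if (x == 0) return 32;                       // Clz(Rbit(0)) == 32.
//     uint32_t r = 0;                              // r = bit_reverse(x).
//     for (int i = 0; i < 32; ++i) r |= ((x >> i) & 1u) << (31 - i);
//     return __builtin_clz(r);                     // == CTZ(x).
//   }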

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenRotateRight(LocationSummary* locations,
                           Primitive::Type type,
                           vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
    uint32_t shift = rhs.immediate() & (RegisterFrom(in, type).SizeInBits() - 1);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           rhs.reg());
  }
}
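
// For a constant distance, the code above masks the immediate with
// (width - 1), matching Java's rotateRight, which only uses the low
// log2(width) bits of the distance. Equivalent C++ sketch (hypothetical
// helper name):
//
//   #include <cstdint>
//
//   uint32_t RotateRight32(uint32_t x, uint32_t distance) {
//     uint32_t shift = distance & 31u;             // same masking as above
//     return shift == 0 ? x : (x >> shift) | (x << (32u - shift));
//   }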

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenRotateLeft(LocationSummary* locations,
                          Primitive::Type type,
                          vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
    uint32_t regsize = RegisterFrom(in, type).SizeInBits();
    uint32_t shift = (regsize - rhs.immediate()) & (regsize - 1);
    __ Ror(RegisterFrom(out, type), RegisterFrom(in, type), shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Neg(RegisterFrom(out, type),
           Operand(RegisterFrom(locations->InAt(1), type)));
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           RegisterFrom(out, type));
  }
}
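
// ARM64 has no rotate-left instruction, so rotate-left is rewritten as a
// rotate-right by the complemented distance: (width - d) & (width - 1) for an
// immediate, or Neg for a register (rotation is modular, so -d mod width is
// equivalent). Sketch (hypothetical helper name):
//
//   #include <cstdint>
//
//   uint32_t RotateLeft32(uint32_t x, uint32_t distance) {
//     uint32_t shift = (32u - distance) & 31u;     // Neg/subtract, then Ror
//     return shift == 0 ? x : (x >> shift) | (x << (32u - shift));
//   }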

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}
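
// Cmp/Cneg above computes the absolute value without a branch: the compare
// sets flags against zero, and Cneg negates only when the input was negative.
// A sketch of the same semantics (hypothetical helper; as with Java's
// Math.abs, the most negative value wraps to itself):
//
//   #include <cstdint>
//
//   int32_t AbsInt32(int32_t x) {
//     uint32_t u = static_cast<uint32_t>(x);       // unsigned negate avoids UB
//     return x < 0 ? static_cast<int32_t>(0u - u)  // Cneg taken on "lt"
//                  : x;
//   }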

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}
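
// Cmp/Csel above is a branch-free select: one comparison sets the flags, and
// Csel keeps op1 on lt (min) or gt (max) and op2 otherwise. Sketch
// (hypothetical helper name):
//
//   #include <cstdint>
//
//   int64_t MinMax64(int64_t op1, int64_t op2, bool is_min) {
//     bool take_op1 = is_min ? (op1 < op2) : (op1 > op2);  // Cmp
//     return take_op1 ? op1 : op2;                         // Csel
//   }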

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}
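
// The Fmov/Fadd/Fcvtms sequence above follows Java's Math.round contract:
// add 0.5, then take the floor, here done as a float-to-int convert that
// rounds toward minus infinity. A rough C++ equivalent (hypothetical helper;
// Fcvtms additionally saturates out-of-range values, which this sketch omits):
//
//   #include <cmath>
//   #include <cstdint>
//
//   int32_t RoundFloat(float in) {
//     return static_cast<int32_t>(std::floor(in + 0.5f));  // Fadd + Fcvtms
//   }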

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    DCHECK(trg.IsW());
    codegen->MaybeGenerateReadBarrier(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
  }
}
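
// On the volatile path above, cores preferring acquire/release get a single
// load-acquire; the others get a plain load followed by a read barrier. In
// C++11 atomics terms the acquire flavor corresponds roughly to the following
// (illustrative sketch, not ART code):
//
//   #include <atomic>
//   #include <cstdint>
//
//   int32_t VolatileGetInt(const std::atomic<int32_t>* field) {
//     return field->load(std::memory_order_acquire);  // single Ldar on ARM64
//   }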

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                         // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);         // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    codegen->GetAssembler()->PoisonHeapReference(value);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    // TODO: Do we need a read barrier here when `type == Primitive::kPrimNot`?
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    // TODO: Do we need a read barrier here when `type == Primitive::kPrimNot`?
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value);
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
  }
}
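
// The exclusive load/store loop above (Ldaxr/Stlxr, or Ldxr/Stxr bracketed by
// explicit barriers) is the load-linked/store-conditional expansion of one
// strong compare-and-swap. With C++11 atomics the whole loop corresponds
// roughly to a single call (illustrative sketch, not ART code):
//
//   #include <atomic>
//   #include <cstdint>
//
//   bool CasInt(std::atomic<int32_t>* addr, int32_t expected, int32_t value) {
//     // True iff *addr held `expected` and now holds `value`.
//     return addr->compare_exchange_strong(expected, value,
//                                          std::memory_order_seq_cst);
//   }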

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic does not always work when heap
  // poisoning is enabled (it breaks run-test 004-UnsafeTest); turn it
  // off temporarily as a quick fix.
  //
  // TODO(rpl): Fix it and turn it back on.
  //
  // TODO(rpl): Also, we should investigate whether we need a read
  // barrier in the generated code.
  if (kPoisonHeapReferences) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}
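
// Because string values are 8-byte aligned and zero padded, the loop above can
// compare four 16-bit characters per iteration with 64-bit loads. The same
// idea in portable C++ (illustrative sketch; assumes both buffers are padded
// to a multiple of four characters, as the assertions above guarantee):
//
//   #include <cstdint>
//   #include <cstring>
//
//   bool ValuesEqual(const uint16_t* a, const uint16_t* b, int32_t char_count) {
//     for (int32_t i = 0; i < char_count; i += 4) {    // 8 bytes per step
//       uint64_t wa, wb;
//       std::memcpy(&wa, a + i, sizeof(wa));
//       std::memcpy(&wb, b + i, sizeof(wb));
//       if (wa != wb) return false;
//     }
//     return true;
//   }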

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }
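  // Note: the pIndexOf entrypoint only searches for a value that fits in one
  // 16-bit UTF-16 unit; code points above 0xFFFF need the surrogate-aware logic
  // of the generic String.indexOf, which is why they are sent to the slow path.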

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

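  // Call the pIndexOf entrypoint: the Quick entrypoint table sits at a fixed
  // offset from the thread register (tr), so the call is a load from the current
  // Thread followed by a branch-with-link; the result comes back in w0 per the
  // runtime calling convention.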
  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetVIXLAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());
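  // A null byte array is sent to the slow path, which re-invokes the original
  // StringFactory method and lets it throw the NullPointerException.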

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
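  // Record a stack map for this runtime call site, so the frame can be walked
  // and a GC triggered by the allocation handled correctly.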
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

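  // No null check is emitted for the char[] argument here; in practice callers
  // of java.lang.StringFactory.newStringFromChars(int offset, int charCount,
  // char[] data) null-check `data` before the call, so the intrinsic can jump
  // straight to the runtime entrypoint.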
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());
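  // As with newStringFromBytes above, a null source string takes the slow path,
  // which performs the original call and throws.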

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}
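
// The empty visitors generated by this macro create no LocationSummary, so
// these invokes fall back to the ordinary (non-intrinsic) call path.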

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art