/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow path used as a fallback (calling the managed code to handle the intrinsic) in an
// intrinsified call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

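// Note: an Fmov between a general-purpose and an FP register copies the raw bit pattern
// unchanged, which is exactly the contract of Double.doubleToRawLongBits()/longBitsToDouble()
// and their Float counterparts handled below.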
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

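// For shorts, Rev16 reverses the bytes within each 16-bit halfword; the extra Sxth then
// sign-extends the low halfword, since Java's Short.reverseBytes() returns a (signed) short.
// Rev handles the int and long cases on the full 32- or 64-bit register.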
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

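// A64 has no count-trailing-zeros instruction, so CTZ is computed with the standard idiom:
// bit-reverse (Rbit), then count leading zeros (Clz).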
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

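// Java rotate semantics take the shift amount modulo the operand width. For an immediate this
// is made explicit by masking with (width - 1); for a register operand, Ror itself only uses
// the low bits of the shift count.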
static void GenRotateRight(LocationSummary* locations,
                           Primitive::Type type,
                           vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
    uint32_t shift = rhs.immediate() & (RegisterFrom(in, type).SizeInBits() - 1);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           rhs.reg());
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateRight(HInvoke* invoke) {
  GenRotateRight(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

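// There is no rotate-left instruction: ROL(x, s) is emitted as ROR(x, width - s). For a
// register amount, negating the count is sufficient, since -s and (width - s) agree modulo
// the operand width and Ror ignores the higher bits of the count.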
static void GenRotateLeft(LocationSummary* locations,
                          Primitive::Type type,
                          vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();
  Operand rhs = OperandFrom(locations->InAt(1), type);

  if (rhs.IsImmediate()) {
    uint32_t regsize = RegisterFrom(in, type).SizeInBits();
    uint32_t shift = (regsize - rhs.immediate()) & (regsize - 1);
    __ Ror(RegisterFrom(out, type), RegisterFrom(in, type), shift);
  } else {
    DCHECK(rhs.shift() == vixl::LSL && rhs.shift_amount() == 0);
    __ Neg(RegisterFrom(out, type),
           Operand(RegisterFrom(locations->InAt(1), type)));
    __ Ror(RegisterFrom(out, type),
           RegisterFrom(in, type),
           RegisterFrom(out, type));
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitLongRotateLeft(HInvoke* invoke) {
  GenRotateLeft(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

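// Branchless abs: compare against zero, then conditionally negate (Cneg) when the input was
// negative. As with Java's Math.abs, Integer.MIN_VALUE and Long.MIN_VALUE map to themselves.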
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

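// Fmin/Fmax match the Java Math.min/max corner cases directly: a NaN input yields NaN, and
// -0.0 compares strictly less than +0.0, so a single instruction suffices.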
static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

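// Integer min/max is a compare followed by a conditional select (Csel), avoiding a branch.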
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

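// The Frint family maps directly onto the rounding intrinsics: Frintp rounds toward
// +infinity (Math.ceil), Frintm toward -infinity (Math.floor), and Frintn to nearest with
// ties to even (Math.rint).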
void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

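// Math.round(x) is specified as floor(x + 0.5), which is transcribed literally below: add
// 0.5, then convert with Fcvtms, the FP-to-signed conversion that rounds toward minus
// infinity.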
static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

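// Thread.currentThread() is a single TLS-relative load: the Java peer object is cached in the
// native Thread structure at PeerOffset, and tr is the register ART reserves for the current
// Thread* on ARM64.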
void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

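// A volatile get uses the codegen's LoadAcquire (a load-acquire instruction) on cores that
// prefer acquire/release; otherwise a plain load is followed by an explicit Dmb load barrier.
// Reference loads may additionally need a read barrier afterwards (MaybeGenerateReadBarrier).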
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);
  if (is_volatile) {
    if (use_acquire_release) {
      codegen->LoadAcquire(invoke, trg, mem_op);
    } else {
      codegen->Load(type, trg, mem_op);
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    codegen->Load(type, trg, mem_op);
  }

  if (type == Primitive::kPrimNot) {
    DCHECK(trg.IsW());
    codegen->MaybeGenerateReadBarrier(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

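// Volatile and ordered puts are emitted as a store-release when acquire/release is preferred;
// otherwise a full Dmb precedes the store and a volatile put adds a trailing Dmb load
// barrier. Reference values may also need heap-reference poisoning and a GC card mark.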
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

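// Compare-and-swap is a load-exclusive/store-exclusive retry loop: the store fails, and the
// loop retries, if another agent touched the location in between. With acquire/release
// preferred, Ldaxr/Stlxr provide the ordering; otherwise plain exclusives are bracketed by
// explicit Dmb barriers.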
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    codegen->GetAssembler()->PoisonHeapReference(value);
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = (tmp_value == 0);

  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    // TODO: Do we need a read barrier here when `type == Primitive::kPrimNot`?
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    __ Dmb(InnerShareable, BarrierWrites);
    __ Bind(&loop_head);
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    // TODO: Do we need a read barrier here when `type == Primitive::kPrimNot`?
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value);
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic does not always work when heap
  // poisoning is enabled (it breaks run-test 004-UnsafeTest); turn it
  // off temporarily as a quick fix.
  //
  // TODO(rpl): Fix it and turn it back on.
  //
  // TODO(rpl): Also, we should investigate whether we need a read
  // barrier in the generated code.
  if (kPoisonHeapReferences) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go into the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

1274void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
1275 vixl::MacroAssembler* masm = GetVIXLAssembler();
1276 LocationSummary* locations = invoke->GetLocations();
1277
1278 Register str = WRegisterFrom(locations->InAt(0));
1279 Register arg = WRegisterFrom(locations->InAt(1));
1280 Register out = XRegisterFrom(locations->Out());
1281
1282 UseScratchRegisterScope scratch_scope(masm);
1283 Register temp = scratch_scope.AcquireW();
1284 Register temp1 = WRegisterFrom(locations->GetTemp(0));
1285 Register temp2 = WRegisterFrom(locations->GetTemp(1));
1286
1287 vixl::Label loop;
1288 vixl::Label end;
1289 vixl::Label return_true;
1290 vixl::Label return_false;
1291
1292 // Get offsets of count, value, and class fields within a string object.
1293 const int32_t count_offset = mirror::String::CountOffset().Int32Value();
1294 const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1295 const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1296
1297 // Note that the null check must have been done earlier.
1298 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1299
1300 // Check if input is null, return false if it is.
1301 __ Cbz(arg, &return_false);
1302
1303 // Reference equality check, return true if same reference.
1304 __ Cmp(str, arg);
1305 __ B(&return_true, eq);
1306
1307 // Instanceof check for the argument by comparing class fields.
1308 // All string objects must have the same type since String cannot be subclassed.
1309 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1310 // If the argument is a string object, its class field must be equal to receiver's class field.
1311 __ Ldr(temp, MemOperand(str.X(), class_offset));
1312 __ Ldr(temp1, MemOperand(arg.X(), class_offset));
1313 __ Cmp(temp, temp1);
1314 __ B(&return_false, ne);
1315
1316 // Load lengths of this and argument strings.
1317 __ Ldr(temp, MemOperand(str.X(), count_offset));
1318 __ Ldr(temp1, MemOperand(arg.X(), count_offset));
1319 // Check if lengths are equal, return false if they're not.
1320 __ Cmp(temp, temp1);
1321 __ B(&return_false, ne);
1322 // Store offset of string value in preparation for comparison loop
1323 __ Mov(temp1, value_offset);
1324 // Return true if both strings are empty.
1325 __ Cbz(temp, &return_true);
1326
1327 // Assertions that must hold in order to compare strings 4 characters at a time.
1328 DCHECK_ALIGNED(value_offset, 8);
1329 static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");
1330
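  // Switch to the 64-bit views of the temps: temp1 is used as a 64-bit offset below,
  // and temp2 receives 64-bit data loads.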
  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
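  // An illustrative sketch of the equivalent logic (load_u64 and the names are hypothetical):
  //   size_t off = value_offset;
  //   int32_t remaining = count;
  //   do {
  //     if (load_u64(str + off) != load_u64(arg + off)) return false;
  //     off += 8;
  //     remaining -= 4;  // Four 16-bit chars per 64-bit word.
  //   } while (remaining > 0);
  //   return true;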
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
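    // 0xFFFF does not fit in a 12-bit arithmetic immediate, so it is materialized in the
    // temp register for the comparison.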
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

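  // The stub takes (string, code point, start index) in the first three runtime calling
  // convention registers and returns the resulting index (or -1) in w0.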
  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
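  // A null byte array must raise a NullPointerException, so it is handled on the slow path.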
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  __ Blr(lr);
  // Record the PC after the call so the stack map corresponds to the return address.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  __ Blr(lr);
  // Record the PC after the call so the stack map corresponds to the return address.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
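  // Copying from a null String must raise a NullPointerException, so it is handled on the
  // slow path.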
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  __ Blr(lr);
  // Record the PC after the call so the stack map corresponds to the return address.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}
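
// Each expansion defines empty visitor bodies: no LocationSummary is created, so the
// invoke is not intrinsified and is compiled as an ordinary call.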

UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art