/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::InputRegisterAt;
using helpers::OutputRegister;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke)
      : SlowPathCodeARM64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
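      // Rev16 byte-swaps each 16-bit half of the register; the extra Sxth
      // sign-extends the low half, since the result of Short.reverseBytes is
      // returned widened to a 32-bit int.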
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

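  // ARM64 has no direct count-trailing-zeros instruction, so use the identity
  // CTZ(x) == CLZ(bit_reverse(x)): Rbit reverses the bits, then Clz counts
  // the leading zeros of the reversed value.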
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenBitCount(HInvoke* instr, Primitive::Type type, vixl::MacroAssembler* masm) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  UseScratchRegisterScope temps(masm);

  Register src = InputRegisterAt(instr, 0);
  Register dst = RegisterFrom(instr->GetLocations()->Out(), type);
  FPRegister fpr = (type == Primitive::kPrimLong) ? temps.AcquireD() : temps.AcquireS();

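  // There is no scalar popcount instruction on ARM64, so the value is moved
  // to a NEON register: Cnt counts the set bits in each byte lane, Addv sums
  // the byte counts into a single lane, and the result is moved back.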
  __ Fmov(fpr, src);
  __ Cnt(fpr.V8B(), fpr.V8B());
  __ Addv(fpr.B(), fpr.V8B());
  __ Fmov(dst, fpr);
}

void IntrinsicLocationsBuilderARM64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
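  // A single Fmin/Fmax suffices here: these instructions propagate NaN
  // inputs and order -0.0 before +0.0, matching Java's Math.min/max.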
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

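  // Compare once, then pick the result with a conditional select; this
  // computes min/max without any branches.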
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
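  // Java's Math.round is defined as floor(x + 0.5), hence the
  // kRoundIsPlusPointFive guard in the locations builders below: add the 0.5
  // and convert with Fcvtms, which rounds toward minus infinity.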
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateArrayLoadWithBakerReadBarrier(
        invoke, trg_loc, base, 0U, offset_loc, temp, /* needs_null_check */ false);
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
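    // A volatile get needs acquire semantics, so emit a load-acquire
    // (LDAR-style) instruction instead of a plain load.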
    if (is_volatile) {
      codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
    }
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

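    // Volatile and ordered puts are both implemented with a store-release
    // (STLR-style) instruction, which is sufficient for the ordering either
    // one requires on ARM64.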
    if (is_volatile || is_ordered) {
      codegen->StoreRelease(type, source, mem_op);
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

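  // This is a standard ARM64 load-linked/store-conditional retry loop: Ldaxr
  // performs an exclusive load-acquire, Stlxr attempts the matching exclusive
  // store-release, and a non-zero status result means the exclusive monitor
  // was lost, so the compare-and-swap must be retried.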
1066 vixl::Label loop_head, exit_loop;
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001067 __ Bind(&loop_head);
1068 // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
1069 // the reference stored in the object before attempting the CAS,
1070 // similar to the one in the art::Unsafe_compareAndSwapObject JNI
1071 // implementation.
1072 //
1073 // Note that this code is not (yet) used when read barriers are
1074 // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
1075 DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
1076 __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
1077 __ Cmp(tmp_value, expected);
1078 __ B(&exit_loop, ne);
1079 __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
1080 __ Cbnz(tmp_32, &loop_head);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001081 __ Bind(&exit_loop);
1082 __ Cset(out, eq);
Roland Levillain4d027112015-07-01 15:41:14 +01001083
1084 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
Roland Levillain4d027112015-07-01 15:41:14 +01001085 codegen->GetAssembler()->UnpoisonHeapReference(expected);
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001086 if (value.Is(expected)) {
1087 // Do not unpoison `value`, as it is the same register as
1088 // `expected`, which has just been unpoisoned.
1089 } else {
1090 codegen->GetAssembler()->UnpoisonHeapReference(value);
1091 }
Roland Levillain4d027112015-07-01 15:41:14 +01001092 }
Andreas Gampe878d58c2015-01-15 23:24:00 -08001093}
1094
1095void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001096 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001097}
1098void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001099 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001100}
1101void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain391b8662015-12-18 11:43:38 +00001102 // The UnsafeCASObject intrinsic is missing a read barrier, and
1103 // therefore sometimes does not work as expected (b/25883050).
1104 // Turn it off temporarily as a quick fix, until the read barrier is
1105 // implemented (see TODO in GenCAS below).
1106 //
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001107 // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
1108 if (kEmitCompilerReadBarrier) {
Roland Levillain985ff702015-10-23 13:25:35 +01001109 return;
1110 }
1111
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001112 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001113}
1114
1115void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
1116 GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
1117}
1118void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
1119 GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
1120}
1121void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
1122 GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
1123}
1124
1125void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
Andreas Gampe878d58c2015-01-15 23:24:00 -08001126 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1127 LocationSummary::kCallOnSlowPath,
1128 kIntrinsified);
1129 locations->SetInAt(0, Location::RequiresRegister());
1130 locations->SetInAt(1, Location::RequiresRegister());
Nicolas Geoffray82f34492015-02-04 10:44:23 +00001131 // In case we need to go in the slow path, we can't have the output be the same
1132 // as the input: the current liveness analysis considers the input to be live
1133 // at the point of the call.
1134 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001135}
1136
1137void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
1138 vixl::MacroAssembler* masm = GetVIXLAssembler();
1139 LocationSummary* locations = invoke->GetLocations();
1140
1141 // Location of reference to data array
1142 const MemberOffset value_offset = mirror::String::ValueOffset();
1143 // Location of count
1144 const MemberOffset count_offset = mirror::String::CountOffset();
Andreas Gampe878d58c2015-01-15 23:24:00 -08001145
1146 Register obj = WRegisterFrom(locations->InAt(0)); // String object pointer.
1147 Register idx = WRegisterFrom(locations->InAt(1)); // Index of character.
1148 Register out = WRegisterFrom(locations->Out()); // Result character.
1149
1150 UseScratchRegisterScope temps(masm);
1151 Register temp = temps.AcquireW();
1152 Register array_temp = temps.AcquireW(); // We can trade this for worse scheduling.
1153
1154 // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
1155 // the cost.
1156 // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
1157 // we will not optimize the code for constants (which would save a register).
1158
1159 SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
1160 codegen_->AddSlowPath(slow_path);
1161
1162 __ Ldr(temp, HeapOperand(obj, count_offset)); // temp = str.length.
1163 codegen_->MaybeRecordImplicitNullCheck(invoke);
1164 __ Cmp(idx, temp);
1165 __ B(hs, slow_path->GetEntryLabel());
1166
Jeff Hao848f70a2014-01-15 13:49:50 -08001167 __ Add(array_temp, obj, Operand(value_offset.Int32Value())); // array_temp := str.value.
Andreas Gampe878d58c2015-01-15 23:24:00 -08001168
1169 // Load the value.
Jeff Hao848f70a2014-01-15 13:49:50 -08001170 __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1)); // out := array_temp[idx].
Andreas Gampe878d58c2015-01-15 23:24:00 -08001171
1172 __ Bind(slow_path->GetExitLabel());
1173}
1174
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001175void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001176 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Scott Wakeling1f36f412016-04-21 11:13:45 +01001177 invoke->InputAt(1)->CanBeNull()
1178 ? LocationSummary::kCallOnSlowPath
1179 : LocationSummary::kNoCall,
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001180 kIntrinsified);
Scott Wakeling1f36f412016-04-21 11:13:45 +01001181 locations->SetInAt(0, Location::RequiresRegister());
1182 locations->SetInAt(1, Location::RequiresRegister());
1183 locations->AddTemp(Location::RequiresRegister());
1184 locations->AddTemp(Location::RequiresRegister());
1185 locations->AddTemp(Location::RequiresRegister());
1186 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001187}
1188
1189void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
1190 vixl::MacroAssembler* masm = GetVIXLAssembler();
1191 LocationSummary* locations = invoke->GetLocations();
1192
Scott Wakeling1f36f412016-04-21 11:13:45 +01001193 Register str = XRegisterFrom(locations->InAt(0));
1194 Register arg = XRegisterFrom(locations->InAt(1));
1195 Register out = OutputRegister(invoke);
1196
1197 Register temp0 = WRegisterFrom(locations->GetTemp(0));
1198 Register temp1 = WRegisterFrom(locations->GetTemp(1));
1199 Register temp2 = WRegisterFrom(locations->GetTemp(2));
1200
1201 vixl::Label loop;
1202 vixl::Label find_char_diff;
1203 vixl::Label end;
1204
1205 // Get offsets of count and value fields within a string object.
1206 const int32_t count_offset = mirror::String::CountOffset().Int32Value();
1207 const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1208
Nicolas Geoffray512e04d2015-03-27 17:21:24 +00001209 // Note that the null check must have been done earlier.
Calin Juravle641547a2015-04-21 22:08:51 +01001210 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001211
  // Take the slow path and throw if the argument can be null and actually is null.
  SlowPathCodeARM64* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ Subs(out, str, arg);
  __ B(&end, eq);
  // Load lengths of this and argument strings.
  __ Ldr(temp0, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Return zero if both strings are empty.
  __ Orr(out, temp0, temp1);
  __ Cbz(out, &end);
  // out = length diff.
  __ Subs(out, temp0, temp1);
  // temp2 = min(len(str), len(arg)).
  __ Csel(temp2, temp1, temp0, ge);
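  // The flags are still set by the Subs above: `ge` means len(str) >= len(arg), so the shorter
  // length temp1 is selected, otherwise temp0.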
  // Shorter string is empty?
  __ Cbz(temp2, &end);

  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);

  UseScratchRegisterScope scratch_scope(masm);
  Register temp4 = scratch_scope.AcquireX();

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Promote temp0 to an X reg, ready for LDR.
  temp0 = temp0.X();

  // Loop to compare 4x16-bit characters at a time (ok because of string data alignment).
  __ Bind(&loop);
  __ Ldr(temp4, MemOperand(str.X(), temp1));
  __ Ldr(temp0, MemOperand(arg.X(), temp1));
  __ Cmp(temp4, temp0);
  __ B(ne, &find_char_diff);
  __ Add(temp1, temp1, char_size * 4);
  __ Subs(temp2, temp2, 4);
  __ B(gt, &loop);
  __ B(&end);

  // Promote temp1 to an X reg, ready for EOR.
  temp1 = temp1.X();

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ Eor(temp1, temp0, temp4);
  __ Rbit(temp1, temp1);
  __ Clz(temp1, temp1);
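  // Rbit followed by Clz yields the index of the least significant set bit, i.e. 16 * (index of
  // the first differing character) plus the bit offset inside that character; the LSR #4 below
  // converts this bit index into a character index.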
  // If the number of 16-bit chars remaining <= the index where the difference occurs (0-3), then
  // the difference occurs outside the remaining string data, so just return length diff (out).
  __ Cmp(temp2, Operand(temp1, LSR, 4));
  __ B(le, &end);
  // Extract the characters and calculate the difference.
  __ Bic(temp1, temp1, 0xf);
  __ Lsr(temp0, temp0, temp1);
  __ Lsr(temp4, temp4, temp1);
  __ And(temp4, temp4, 0xffff);
  __ Sub(out, temp4, Operand(temp0, UXTH));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());
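  // Note that `out` is an X register: the loop below uses it as a 64-bit scratch to compare
  // 4 characters (8 bytes) at a time before it finally holds the boolean result.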

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
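  // Sub with SetFlags behaves like Subs: `temp` counts the characters still to compare, and the
  // `gt` branch below keeps looping while any remain. A trailing partial group is safe to compare
  // because both strings have the same length and their zero padding matches as well.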
  __ B(&loop, gt);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit the slow path for a small constant or
  // a char.
  SlowPathCodeARM64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Tst(char_reg, 0xFFFF0000);
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(ne, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    Register tmp_reg = WRegisterFrom(locations->GetTemp(0));
    __ Mov(tmp_reg, 0);
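    // The temp was allocated as the runtime calling convention's third argument register, so the
    // pIndexOf stub receives start_index = 0 where it expects it.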
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void GenFPToFPCall(HInvoke* invoke,
                          vixl::MacroAssembler* masm,
                          CodeGeneratorARM64* codegen,
                          QuickEntrypointEnum entry) {
  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64WordSize>(entry).Int32Value()));
  __ Blr(lr);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = XRegisterFrom(locations->InAt(0));
  Register srcBegin = XRegisterFrom(locations->InAt(1));
  Register srcEnd = XRegisterFrom(locations->InAt(2));
  Register dstObj = XRegisterFrom(locations->InAt(3));
  Register dstBegin = XRegisterFrom(locations->InAt(4));

  Register src_ptr = XRegisterFrom(locations->GetTemp(0));
  Register src_ptr_end = XRegisterFrom(locations->GetTemp(1));

  UseScratchRegisterScope temps(masm);
  Register dst_ptr = temps.AcquireX();
  Register tmp = temps.AcquireW();

  // src range to copy.
  __ Add(src_ptr, srcObj, Operand(value_offset));
  __ Add(src_ptr_end, src_ptr, Operand(srcEnd, LSL, 1));
  __ Add(src_ptr, src_ptr, Operand(srcBegin, LSL, 1));

  // dst to be copied.
  __ Add(dst_ptr, dstObj, Operand(data_offset));
  __ Add(dst_ptr, dst_ptr, Operand(dstBegin, LSL, 1));

  // Do the copy.
  vixl::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_ptr, src_ptr_end);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_ptr, char_size, vixl::PostIndex));
  __ Strh(tmp, MemOperand(dst_ptr, char_size, vixl::PostIndex));
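  // Post-index addressing performs the access first and then advances each pointer by one
  // character, so the loop needs no separate induction variable.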
  __ B(&loop);
  __ Bind(&done);
}

// Mirrors ARRAYCOPY_SHORT_CHAR_ARRAY_THRESHOLD in libcore, so we can choose to use the native
// implementation there for longer copy lengths.
static constexpr int32_t kSystemArrayCopyCharThreshold = 32;

static void SetSystemArrayCopyLocationRequires(LocationSummary* locations,
                                               uint32_t at,
                                               HInstruction* input) {
  HIntConstant* const_input = input->AsIntConstant();
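  // A constant that cannot be encoded as an Add/Sub immediate must be materialized in a
  // register; anything else may stay a register-or-constant operand.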
  if (const_input != nullptr && !vixl::Assembler::IsImmAddSub(const_input->GetValue())) {
    locations->SetInAt(at, Location::RequiresRegister());
  } else {
    locations->SetInAt(at, Location::RegisterOrConstant(input));
  }
}

void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dst_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dst_pos != nullptr && dst_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0 and not so long that we would (currently) prefer libcore's
  // native implementation.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len > kSystemArrayCopyCharThreshold) {
      // Just call as normal.
      return;
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(char[] src, int src_pos, char[] dst, int dst_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

static void CheckSystemArrayCopyPosition(vixl::MacroAssembler* masm,
                                         const Location& pos,
                                         const Register& input,
                                         const Location& length,
                                         SlowPathCodeARM64* slow_path,
                                         const Register& input_len,
                                         const Register& temp,
                                         bool length_is_input_length = false) {
  const int32_t length_offset = mirror::Array::LengthOffset().Int32Value();
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ Ldr(temp, MemOperand(input, length_offset));
        __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
        __ B(slow_path->GetEntryLabel(), lt);
      }
    } else {
      // Check that length(input) >= pos.
      __ Ldr(input_len, MemOperand(input, length_offset));
      __ Subs(temp, input_len, pos_const);
      __ B(slow_path->GetEntryLabel(), lt);

      // Check that (length(input) - pos) >= length.
      __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
      __ B(slow_path->GetEntryLabel(), lt);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    __ Cbnz(WRegisterFrom(pos), slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = WRegisterFrom(pos);
    __ Tbnz(pos_reg, pos_reg.size() - 1, slow_path->GetEntryLabel());
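    // Tbnz on the top bit tests the sign of pos without disturbing the condition flags.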

    // Check that pos <= length(input) && (length(input) - pos) >= length.
    __ Ldr(temp, MemOperand(input, length_offset));
    __ Subs(temp, temp, pos_reg);
    // Ccmp if length(input) >= pos, else definitely bail to slow path (N!=V == lt).
    __ Ccmp(temp, OperandFrom(length, Primitive::kPrimInt), NFlag, ge);
    __ B(slow_path->GetEntryLabel(), lt);
  }
}

// Compute base source address, base destination address, and end source address
// for System.arraycopy* intrinsics.
static void GenSystemArrayCopyAddresses(vixl::MacroAssembler* masm,
                                        Primitive::Type type,
                                        const Register& src,
                                        const Location& src_pos,
                                        const Register& dst,
                                        const Location& dst_pos,
                                        const Location& copy_length,
                                        const Register& src_base,
                                        const Register& dst_base,
                                        const Register& src_end) {
  DCHECK(type == Primitive::kPrimNot || type == Primitive::kPrimChar)
      << "Unexpected element type: " << type;
  const int32_t element_size = Primitive::ComponentSize(type);
  const int32_t element_size_shift = Primitive::ComponentSizeShift(type);

  uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_base, src, element_size * constant + data_offset);
  } else {
    __ Add(src_base, src, data_offset);
    __ Add(src_base, src_base, Operand(XRegisterFrom(src_pos), LSL, element_size_shift));
  }

  if (dst_pos.IsConstant()) {
    int32_t constant = dst_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(dst_base, dst, element_size * constant + data_offset);
  } else {
    __ Add(dst_base, dst, data_offset);
    __ Add(dst_base, dst_base, Operand(XRegisterFrom(dst_pos), LSL, element_size_shift));
  }

  if (copy_length.IsConstant()) {
    int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_end, src_base, element_size * constant);
  } else {
    __ Add(src_end, src_base, Operand(XRegisterFrom(copy_length), LSL, element_size_shift));
  }
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();
  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dst = XRegisterFrom(locations->InAt(2));
  Location dst_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  // If source and destination are the same, take the slow path. Overlapping copy regions must be
  // copied in reverse, and we cannot always tell whether that is needed.
  __ Cmp(src, dst);
  __ B(slow_path->GetEntryLabel(), eq);

  // Bail out if the source is null.
  __ Cbz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Cbz(dst, slow_path->GetEntryLabel());

  if (!length.IsConstant()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length is greater than 32, then (currently) prefer libcore's native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyCharThreshold);
    __ B(slow_path->GetEntryLabel(), gt);
  } else {
    // We have already checked in the LocationsBuilder for the constant case.
    DCHECK_GE(length.GetConstant()->AsIntConstant()->GetValue(), 0);
    DCHECK_LE(length.GetConstant()->AsIntConstant()->GetValue(), 32);
  }

  Register src_curr_addr = WRegisterFrom(locations->GetTemp(0));
  Register dst_curr_addr = WRegisterFrom(locations->GetTemp(1));
  Register src_stop_addr = WRegisterFrom(locations->GetTemp(2));

  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               src_curr_addr,
                               dst_curr_addr,
                               false);

  CheckSystemArrayCopyPosition(masm,
                               dst_pos,
                               dst,
                               length,
                               slow_path,
                               src_curr_addr,
                               dst_curr_addr,
                               false);

  src_curr_addr = src_curr_addr.X();
  dst_curr_addr = dst_curr_addr.X();
  src_stop_addr = src_stop_addr.X();

  GenSystemArrayCopyAddresses(masm,
                              Primitive::kPrimChar,
                              src,
                              src_pos,
                              dst,
                              dst_pos,
                              length,
                              src_curr_addr,
                              dst_curr_addr,
                              src_stop_addr);

  // Iterate over the arrays and do a raw copy of the chars.
  const int32_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  UseScratchRegisterScope temps(masm);
  Register tmp = temps.AcquireW();
  vixl::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_curr_addr, src_stop_addr);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_curr_addr, char_size, vixl::PostIndex));
  __ Strh(tmp, MemOperand(dst_curr_addr, char_size, vixl::PostIndex));
  __ B(&loop);
  __ Bind(&done);

  __ Bind(slow_path->GetExitLabel());
}

// We can choose to use the native implementation there for longer copy lengths.
static constexpr int32_t kSystemArrayCopyThreshold = 128;

// CodeGenerator::CreateSystemArrayCopyLocationSummary uses three temporary registers.
// We want to use only two temporary registers in order to reduce the register pressure on arm64,
// so we do not use CodeGenerator::CreateSystemArrayCopyLocationSummary.
void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len >= kSystemArrayCopyThreshold) {
      // Just call as normal.
      return;
    }
  }

  SystemArrayCopyOptimizations optimizations(invoke);

  if (optimizations.GetDestinationIsSource()) {
    if (src_pos != nullptr && dest_pos != nullptr && src_pos->GetValue() < dest_pos->GetValue()) {
      // We only support backward copying if source and destination are the same.
      return;
    }
  }

  if (optimizations.GetDestinationIsPrimitiveArray() || optimizations.GetSourceIsPrimitiveArray()) {
    // We currently don't intrinsify primitive copying.
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(Object src, int src_pos, Object dest, int dest_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dest = XRegisterFrom(locations->InAt(2));
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  vixl::Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  if (!optimizations.GetDestinationIsSource() &&
      (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
    __ Cmp(src, dest);
  }
  // If source and destination are the same, we go to the slow path if we need to do
  // forward copying.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ B(&conditions_on_positions_validated, ne);
      }
      __ Cmp(WRegisterFrom(dest_pos), src_pos_constant);
      __ B(slow_path->GetEntryLabel(), gt);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ B(&conditions_on_positions_validated, ne);
    }
    __ Cmp(RegisterFrom(src_pos, invoke->InputAt(1)->GetType()),
           OperandFrom(dest_pos, invoke->InputAt(3)->GetType()));
    __ B(slow_path->GetEntryLabel(), lt);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ Cbz(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ Cbz(dest, slow_path->GetEntryLabel());
  }

  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length is 128 or more, then (currently) prefer the native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyThreshold);
    __ B(slow_path->GetEntryLabel(), ge);
  }
  // Validity checks: source.
  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               temp1,
                               temp2,
                               optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckSystemArrayCopyPosition(masm,
                               dest_pos,
                               dest,
                               length,
                               slow_path,
                               temp1,
                               temp2,
                               optimizations.GetCountIsDestinationLength());
  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);
    Register temp3 = temps.AcquireW();
    if (!optimizations.GetDoesNotNeedTypeCheck()) {
      // Check whether all elements of the source array are assignable to the component
      // type of the destination array. We do two checks: the classes are the same,
      // or the destination is Object[]. If none of these checks succeed, we go to the
      // slow path.
      __ Ldr(temp1, MemOperand(dest, class_offset));
      __ Ldr(temp2, MemOperand(src, class_offset));
      bool did_unpoison = false;
      if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
          !optimizations.GetSourceIsNonPrimitiveArray()) {
        // One or two of the references need to be unpoisoned. Unpoison them
        // both to make the identity check valid.
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2);
        did_unpoison = true;
      }

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp1->component_type_
        __ Ldr(temp3, HeapOperand(temp1, component_offset));
        __ Cbz(temp3, slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
        __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ Cbnz(temp3, slow_path->GetEntryLabel());
      }

      if (!optimizations.GetSourceIsNonPrimitiveArray()) {
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp2->component_type_
        __ Ldr(temp3, HeapOperand(temp2, component_offset));
        __ Cbz(temp3, slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
        __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ Cbnz(temp3, slow_path->GetEntryLabel());
      }

      __ Cmp(temp1, temp2);

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        vixl::Label do_copy;
        __ B(&do_copy, eq);
        if (!did_unpoison) {
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        }
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ Ldr(temp1, HeapOperand(temp1, component_offset));
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp1 = temp1->super_class_
        __ Ldr(temp1, HeapOperand(temp1, super_offset));
        // No need to unpoison the result, we're comparing against null.
        __ Cbnz(temp1, slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ B(slow_path->GetEntryLabel(), ne);
      }
    } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp1 = src->klass_
      __ Ldr(temp1, HeapOperand(src.W(), class_offset));
      codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ Ldr(temp3, HeapOperand(temp1, component_offset));
      __ Cbz(temp3, slow_path->GetEntryLabel());
      codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
      __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp3, slow_path->GetEntryLabel());
    }

    Register src_curr_addr = temp1.X();
    Register dst_curr_addr = temp2.X();
    Register src_stop_addr = temp3.X();

    GenSystemArrayCopyAddresses(masm,
                                Primitive::kPrimNot,
                                src,
                                src_pos,
                                dest,
                                dest_pos,
                                length,
                                src_curr_addr,
                                dst_curr_addr,
                                src_stop_addr);

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison, nor do any read barrier as the next uses of the destination
    // array will do it.
    vixl::Label loop, done;
    const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    __ Bind(&loop);
    __ Cmp(src_curr_addr, src_stop_addr);
    __ B(&done, eq);
    {
      Register tmp = temps.AcquireW();
      __ Ldr(tmp, MemOperand(src_curr_addr, element_size, vixl::PostIndex));
      __ Str(tmp, MemOperand(dst_curr_addr, element_size, vixl::PostIndex));
    }
    __ B(&loop);
    __ Bind(&done);
  }
  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(dest.W(), Register(), /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Operand infinity;
  Register out;

  if (is64bit) {
    infinity = kPositiveInfinityDouble;
    out = XRegisterFrom(locations->Out());
  } else {
    infinity = kPositiveInfinityFloat;
    out = WRegisterFrom(locations->Out());
  }

  const Register zero = vixl::Assembler::AppropriateZeroRegFor(out);

  MoveFPToInt(locations, is64bit, masm);
  __ Eor(out, out, infinity);
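  // After the XOR, `out` is zero for +Infinity and has only the sign bit set for -Infinity;
  // any other input leaves some other bit set.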
  // We don't care about the sign bit, so shift left.
  __ Cmp(zero, Operand(out, LSL, 1));
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

UNIMPLEMENTED_INTRINSIC(ARM64, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM64)

#undef __

}  // namespace arm64
}  // namespace art