/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"
#include "utils/arm64/constants_arm64.h"

#include "vixl/a64/disasm-a64.h"
#include "vixl/a64/macro-assembler-a64.h"

using namespace vixl;  // NOLINT(build/namespaces)

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;

namespace {

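// Wraps a raw 64-bit address held in a core register as a VIXL memory
// operand. The Memory.peek/poke intrinsics below use this helper because
// they address native memory directly rather than relative to a heap object.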
ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

vixl::MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetAssembler()->vixl_masm_;
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

#define __ codegen->GetAssembler()->vixl_masm_->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

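// Typical use from the code generators below (see VisitStringCharAt): allocate
// the slow path, register it with codegen_->AddSlowPath(), branch to its entry
// label on the uncommon condition, and bind its exit label after the
// fast-path code.
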
#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathARM64 slow path.  Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathARM64 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathARM64 for HInvokeVirtual).  So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

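// An Fmov between a general-purpose and an FP register copies the raw bit
// pattern unchanged, which is exactly the contract of
// Double.doubleToRawLongBits / Double.longBitsToDouble (and the float
// equivalents), so each of these intrinsics compiles to a single instruction.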
static void MoveFPToInt(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
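      // Rev16 reverses the bytes within each 16-bit halfword; Sxth then
      // sign-extends the low halfword so the result is a proper Java short.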
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenCompare(LocationSummary* locations, bool is_long, vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = WRegisterFrom(out);

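  // Materialize -1, 0, or +1 without branching: Cset writes 1 on "greater
  // than" (else 0), and Cinv bitwise-inverts that 0 to an all-ones -1 when
  // "less than" holds; on equality neither fires and the result stays 0.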
  __ Cmp(op1_reg, op2_reg);
  __ Cset(out_reg, gt);           // out == +1 if GT or 0 otherwise
  __ Cinv(out_reg, out_reg, lt);  // out == -1 if LT or unchanged otherwise
}

void IntrinsicLocationsBuilderARM64::VisitIntegerCompare(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerCompare(HInvoke* invoke) {
  GenCompare(invoke->GetLocations(), /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongCompare(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongCompare(HInvoke* invoke) {
  GenCompare(invoke->GetLocations(), /* is_long */ true, GetVIXLAssembler());
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

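  // ARMv8 has no count-trailing-zeros instruction, so reverse the bit order
  // (Rbit) and count leading zeros instead; the two are equivalent.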
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       vixl::MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          vixl::MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

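  // ARM64 Fmin/Fmax propagate NaN operands and treat -0.0 as less than +0.0,
  // which lines up with the semantics Java specifies for Math.min/Math.max,
  // so no extra fix-up code is needed here.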
  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      vixl::MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void GenMathRound(LocationSummary* locations,
                         bool is_double,
                         vixl::MacroAssembler* masm) {
  FPRegister in_reg = is_double ?
      DRegisterFrom(locations->InAt(0)) : SRegisterFrom(locations->InAt(0));
  Register out_reg = is_double ?
      XRegisterFrom(locations->Out()) : WRegisterFrom(locations->Out());
  UseScratchRegisterScope temps(masm);
  FPRegister temp1_reg = temps.AcquireSameSizeAs(in_reg);

  // 0.5 can be encoded as an immediate, so use fmov.
  if (is_double) {
    __ Fmov(temp1_reg, static_cast<double>(0.5));
  } else {
    __ Fmov(temp1_reg, static_cast<float>(0.5));
  }
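  // Fcvtms converts to a signed integer rounding toward minus infinity
  // (floor), so adding 0.5 and then flooring implements Java's
  // round-half-up semantics.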
  __ Fadd(temp1_reg, in_reg, temp1_reg);
  __ Fcvtms(out_reg, temp1_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  // See intrinsics.h.
  if (kRoundIsPlusPointFive) {
    CreateFPToIntPlusTempLocations(arena_, invoke);
  }
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke->GetLocations(), /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
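  // `tr` is the register this backend reserves for the native Thread*; the
  // managed java.lang.Thread peer lives at a fixed offset inside it, so
  // Thread.currentThread() is a single load (the 8 is the pointer size).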
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<8>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateArrayLoadWithBakerReadBarrier(
        invoke, trg_loc, base, 0U, offset_loc, temp, /* needs_null_check */ false);
    if (is_volatile && !use_acquire_release) {
      __ Dmb(InnerShareable, BarrierReads);
    }
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
    if (is_volatile) {
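      // On cores that prefer acquire/release, a single load-acquire (LDAR)
      // provides the ordering a volatile read needs; otherwise fall back to a
      // plain load followed by a read barrier (DMB ISHLD).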
      if (use_acquire_release) {
        codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
      } else {
        codegen->Load(type, trg, mem_op);
        __ Dmb(InnerShareable, BarrierReads);
      }
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
    }
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();

  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      if (use_acquire_release) {
        codegen->StoreRelease(type, source, mem_op);
      } else {
        __ Dmb(InnerShareable, BarrierAll);
        codegen->Store(type, source, mem_op);
        if (is_volatile) {
          __ Dmb(InnerShareable, BarrierReads);
        }
      }
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  bool use_acquire_release = codegen->GetInstructionSetFeatures().PreferAcquireRelease();
  vixl::MacroAssembler* masm = codegen->GetAssembler()->vixl_masm_;

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                  // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);  // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;

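  // The retry loop below is built from an exclusive load/store pair:
  // Ldaxr/Ldxr marks the address for exclusive access, and Stlxr/Stxr only
  // succeeds (writing 0 to its status register) if no other agent touched
  // that location in between; Cbnz on the status register retries on failure.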
  vixl::Label loop_head, exit_loop;
  if (use_acquire_release) {
    __ Bind(&loop_head);
    // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
    // the reference stored in the object before attempting the CAS,
    // similar to the one in the art::Unsafe_compareAndSwapObject JNI
    // implementation.
    //
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
    __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
  } else {
    // Emit a `Dmb(InnerShareable, BarrierAll)` (DMB ISH) instruction
    // instead of a `Dmb(InnerShareable, BarrierWrites)` (DMB ISHST)
    // one, as the latter allows a preceding load to be delayed past
    // the STXR instruction below.
    __ Dmb(InnerShareable, BarrierAll);
    __ Bind(&loop_head);
    // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
    // the reference stored in the object before attempting the CAS,
    // similar to the one in the art::Unsafe_compareAndSwapObject JNI
    // implementation.
    //
    // Note that this code is not (yet) used when read barriers are
    // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
    DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
    __ Ldxr(tmp_value, MemOperand(tmp_ptr));
    __ Cmp(tmp_value, expected);
    __ B(&exit_loop, ne);
    __ Stxr(tmp_32, value, MemOperand(tmp_ptr));
    __ Cbnz(tmp_32, &loop_head);
    __ Dmb(InnerShareable, BarrierAll);
  }
  __ Bind(&exit_loop);
  __ Cset(out, eq);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value);
    }
  }
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS below).
  //
  // TODO(rpl): Fix this issue and re-enable this intrinsic with read barriers.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM64::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // In case we need to go in the slow path, we can't have the output be the same
  // as the input: the current liveness analysis considers the input to be live
  // at the point of the call.
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCharAt(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = WRegisterFrom(locations->InAt(0));  // String object pointer.
  Register idx = WRegisterFrom(locations->InAt(1));  // Index of character.
  Register out = WRegisterFrom(locations->Out());    // Result character.

  UseScratchRegisterScope temps(masm);
  Register temp = temps.AcquireW();
  Register array_temp = temps.AcquireW();            // We can trade this for worse scheduling.

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  __ Ldr(temp, HeapOperand(obj, count_offset));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ Cmp(idx, temp);
  __ B(hs, slow_path->GetEntryLabel());

  __ Add(array_temp, obj, Operand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
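  // The addressing mode scales the index by 2 (UXTW #1) because the string's
  // backing array holds 16-bit code units.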
  __ Ldrh(out, MemOperand(array_temp.X(), idx, UXTW, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = WRegisterFrom(locations->InAt(1));
  __ Cmp(argument, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

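  // tr holds the current Thread*. The quick entrypoints live at fixed offsets
  // inside the Thread object, so the target is loaded into lr and called
  // indirectly.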
  __ Ldr(
      lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pStringCompareTo).Int32Value()));
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());
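  // The output is taken as a 64-bit X register: the comparison loop below
  // reuses it to hold eight bytes of string data at a time.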

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::Label loop;
  vixl::Label end;
  vixl::Label return_true;
  vixl::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ Cbz(arg, &return_false);

  // Reference equality check, return true if same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ Ldr(temp, MemOperand(str.X(), class_offset));
  __ Ldr(temp1, MemOperand(arg.X(), class_offset));
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal, return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

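  // Switch to the 64-bit views of the temps: temp1 serves as a byte offset in
  // the loads below, and temp2 receives full 64-bit words.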
  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // Ok to do this because strings are zero-padded to be 8-byte aligned.
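  // Each 64-bit load covers four 16-bit characters, so the last iteration may
  // compare up to three characters of padding; this is harmless because both
  // strings have equal length and the padding is zeroed.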
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If the loop did not return false, the strings are equal, so return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       vixl::MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = WRegisterFrom(locations->GetTemp(0));

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when the code point is not known
  // statically, or an unconditional slow-path branch when a constant is known to be out of range.
  SlowPathCodeARM64* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = WRegisterFrom(locations->InAt(1));
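    // 0xFFFF cannot be encoded as an arithmetic immediate on A64, so it is
    // materialized in the temp before the comparison.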
    __ Mov(tmp_reg, 0xFFFF);
    __ Cmp(char_reg, Operand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(hi, slow_path->GetEntryLabel());
  }

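  // For IndexOf the temp is the third argument register of the runtime calling
  // convention, so zeroing it passes start_index == 0 to the pIndexOf stub;
  // IndexOfAfter receives its start index as an explicit argument instead.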
  if (start_at_zero) {
    // Start-index = 0.
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pIndexOf).Int32Value()));
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare, and need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

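  // pAllocStringFromBytes allocates and can therefore trigger GC or throw, so
  // PC info is recorded to provide a stack map for this call site.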
  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromBytes).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromChars).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  vixl::MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64WordSize, pAllocStringFromString).Int32Value()));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Blr(lr);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

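// An empty locations builder leaves the invoke un-intrinsified, so these
// intrinsics simply fall back to being compiled as regular calls.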
#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                      \
void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(LongBitCount)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)

UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)

UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
UNIMPLEMENTED_INTRINSIC(IntegerSignum)
UNIMPLEMENTED_INTRINSIC(LongSignum)

// Rotate operations are handled as HRor instructions.
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace arm64
}  // namespace art