/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm64.h"

#include "arch/arm64/instruction_set_features_arm64.h"
#include "art_method.h"
#include "code_generator_arm64.h"
#include "common_arm64.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm64/assembler_arm64.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art {

namespace arm64 {

using helpers::DRegisterFrom;
using helpers::FPRegisterFrom;
using helpers::HeapOperand;
using helpers::LocationFrom;
using helpers::OperandFrom;
using helpers::RegisterFrom;
using helpers::SRegisterFrom;
using helpers::WRegisterFrom;
using helpers::XRegisterFrom;
using helpers::InputRegisterAt;
using helpers::OutputRegister;

namespace {

ALWAYS_INLINE inline MemOperand AbsoluteHeapOperandFrom(Location location, size_t offset = 0) {
  return MemOperand(XRegisterFrom(location), offset);
}

}  // namespace

MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() {
  return codegen_->GetVIXLAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM64::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

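// Convenience shorthand used below: `__` expands to the current VIXL macro-assembler,
// so `__ Mov(...)` emits an ARM64 instruction through VIXL.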
#define __ codegen->GetVIXLAssembler()->

static void MoveFromReturnRegister(Location trg,
                                   Primitive::Type type,
                                   CodeGeneratorARM64* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    Register trg_reg = RegisterFrom(trg, type);
    Register res_reg = RegisterFrom(ARM64ReturnLocation(type), type);
    __ Mov(trg_reg, res_reg, kDiscardForSameWReg);
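    // Note: kDiscardForSameWReg lets VIXL drop the move above when trg_reg and res_reg
    // are the same W register (an assumption about VIXL's DiscardMoveMode; a plain
    // `mov wX, wX` would be redundant here since the value is already in place).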
  } else {
    FPRegister trg_reg = FPRegisterFrom(trg, type);
    FPRegister res_reg = FPRegisterFrom(ARM64ReturnLocation(type), type);
    __ Fmov(trg_reg, res_reg);
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM64* codegen) {
  InvokeDexCallingConventionVisitorARM64 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
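//
// For example, VisitStringCompareTo below uses this slow path when its argument may be
// null: the fast path bails out to a full call to the managed String.compareTo.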
class IntrinsicSlowPathARM64 : public SlowPathCodeARM64 {
 public:
  explicit IntrinsicSlowPathARM64(HInvoke* invoke)
      : SlowPathCodeARM64(invoke), invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM64* codegen = down_cast<CodeGeneratorARM64*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          LocationFrom(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), LocationFrom(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ B(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM64"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM64);
};

#undef __

bool IntrinsicLocationsBuilderARM64::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

#define __ masm->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),
          is64bit ? DRegisterFrom(input) : SRegisterFrom(input));
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, MacroAssembler* masm) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),
          is64bit ? XRegisterFrom(input) : WRegisterFrom(input));
}

void IntrinsicLocationsBuilderARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}
void IntrinsicCodeGeneratorARM64::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type type,
                            MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  switch (type) {
    case Primitive::kPrimShort:
      __ Rev16(WRegisterFrom(out), WRegisterFrom(in));
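      // The value is a Java short held sign-extended in a W register, so re-extend the
      // low 16 bits after the byte swap.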
      __ Sxth(WRegisterFrom(out), WRegisterFrom(out));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      __ Rev(RegisterFrom(out, type), RegisterFrom(in, type));
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << type;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler());
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Clz(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

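  // A64 has no count-trailing-zeros instruction: reverse the bits with Rbit, then count
  // leading zeros, since ctz(x) == clz(rbit(x)).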
  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
  __ Clz(RegisterFrom(out, type), RegisterFrom(out, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenReverse(LocationSummary* locations,
                       Primitive::Type type,
                       MacroAssembler* masm) {
  DCHECK(type == Primitive::kPrimInt || type == Primitive::kPrimLong);

  Location in = locations->InAt(0);
  Location out = locations->Out();

  __ Rbit(RegisterFrom(out, type), RegisterFrom(in, type));
}

void IntrinsicLocationsBuilderARM64::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitLongReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongReverse(HInvoke* invoke) {
  GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler());
}

static void GenBitCount(HInvoke* instr, Primitive::Type type, MacroAssembler* masm) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  UseScratchRegisterScope temps(masm);

  Register src = InputRegisterAt(instr, 0);
  Register dst = RegisterFrom(instr->GetLocations()->Out(), type);
  FPRegister fpr = (type == Primitive::kPrimLong) ? temps.AcquireD() : temps.AcquireS();

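  // A64 has no scalar popcount: move the value to a SIMD register, count the set bits
  // of each byte with Cnt, then sum the byte counts with Addv. For the 32-bit case the
  // Fmov zeroes the upper lanes, so counting all 8 bytes remains correct.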
  __ Fmov(fpr, src);
  __ Cnt(fpr.V8B(), fpr.V8B());
  __ Addv(fpr.B(), fpr.V8B());
  __ Fmov(dst, fpr);
}

void IntrinsicLocationsBuilderARM64::VisitLongBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  FPRegister in_reg = is64bit ? DRegisterFrom(in) : SRegisterFrom(in);
  FPRegister out_reg = is64bit ? DRegisterFrom(out) : SRegisterFrom(out);

  __ Fabs(out_reg, in_reg);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

static void CreateIntToInt(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          MacroAssembler* masm) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register in_reg = is64bit ? XRegisterFrom(in) : WRegisterFrom(in);
  Register out_reg = is64bit ? XRegisterFrom(output) : WRegisterFrom(output);

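  // abs(x): compare with zero, then conditionally negate if the input was negative.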
  __ Cmp(in_reg, Operand(0));
  __ Cneg(out_reg, in_reg, lt);
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToInt(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  FPRegister op1_reg = is_double ? DRegisterFrom(op1) : SRegisterFrom(op1);
  FPRegister op2_reg = is_double ? DRegisterFrom(op2) : SRegisterFrom(op2);
  FPRegister out_reg = is_double ? DRegisterFrom(out) : SRegisterFrom(out);
  if (is_min) {
    __ Fmin(out_reg, op1_reg, op2_reg);
  } else {
    __ Fmax(out_reg, op1_reg, op2_reg);
  }
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(
      invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      bool is_long,
                      MacroAssembler* masm) {
  Location op1 = locations->InAt(0);
  Location op2 = locations->InAt(1);
  Location out = locations->Out();

  Register op1_reg = is_long ? XRegisterFrom(op1) : WRegisterFrom(op1);
  Register op2_reg = is_long ? XRegisterFrom(op2) : WRegisterFrom(op2);
  Register out_reg = is_long ? XRegisterFrom(out) : WRegisterFrom(out);

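  // Csel keeps op1 when the condition holds (lt for min, gt for max), op2 otherwise;
  // on equality the operands hold the same value, so taking op2 is still correct.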
  __ Cmp(op1_reg, op2_reg);
  __ Csel(out_reg, op1_reg, op2_reg, is_min ? lt : gt);
}

void IntrinsicLocationsBuilderARM64::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMinLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Fsqrt(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathCeil(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCeil(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Frintp(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathFloor(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathFloor(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Frintm(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

void IntrinsicLocationsBuilderARM64::VisitMathRint(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRint(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  MacroAssembler* masm = GetVIXLAssembler();
  __ Frintn(DRegisterFrom(locations->Out()), DRegisterFrom(locations->InAt(0)));
}

static void CreateFPToIntPlusFPTempLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresFpuRegister());
}

static void GenMathRound(HInvoke* invoke, bool is_double, vixl::aarch64::MacroAssembler* masm) {
  // Java 8 API definition for Math.round():
  // Return the closest long or int to the argument, with ties rounding to positive infinity.
  //
  // There is no single instruction in ARMv8 that can support the above definition.
  // We choose to use FCVTAS here, because it has the closest semantics.
  // FCVTAS rounds to the nearest integer, with ties away from zero.
  // For most inputs (positive values, zero, or NaN), this instruction is enough.
  // We only need a little fix-up code after FCVTAS when the input is a negative tie.
  //
  // The reason why we did not choose FCVTPS is that although it rounds toward positive
  // infinity, it does not round to nearest.
  // For example, FCVTPS(-1.9) = -1 and FCVTPS(1.1) = 2.
  // If we were using that instruction, more fix-up code would be needed for most inputs.
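  //
  // Worked example: FCVTAS(-2.5) yields -3, but Math.round(-2.5) is -2 because ties
  // round toward positive infinity; the negative-tie fix-up below adds 1 in that case.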
  LocationSummary* l = invoke->GetLocations();
  FPRegister in_reg = is_double ? DRegisterFrom(l->InAt(0)) : SRegisterFrom(l->InAt(0));
  FPRegister tmp_fp = is_double ? DRegisterFrom(l->GetTemp(0)) : SRegisterFrom(l->GetTemp(0));
  Register out_reg = is_double ? XRegisterFrom(l->Out()) : WRegisterFrom(l->Out());
  vixl::aarch64::Label done;

  // Round to nearest integer, ties away from zero.
  __ Fcvtas(out_reg, in_reg);

  // For positive values, zero, or NaN inputs, rounding is done.
  __ Tbz(out_reg, out_reg.GetSizeInBits() - 1, &done);

  // Handle input < 0 cases.
  // If input is negative but not a tie, the previous result (round to nearest) is valid.
  // If input is a negative tie, out_reg += 1.
  __ Frinta(tmp_fp, in_reg);
  __ Fsub(tmp_fp, in_reg, tmp_fp);
  __ Fcmp(tmp_fp, 0.5);
  __ Cinc(out_reg, out_reg, eq);

  __ Bind(&done);
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundDouble(HInvoke* invoke) {
  CreateFPToIntPlusFPTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundDouble(HInvoke* invoke) {
  GenMathRound(invoke, /* is_double */ true, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMathRoundFloat(HInvoke* invoke) {
  CreateFPToIntPlusFPTempLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathRoundFloat(HInvoke* invoke) {
  GenMathRound(invoke, /* is_double */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekByte(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsb(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekIntNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(WRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekLongNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldr(XRegisterFrom(invoke->GetLocations()->Out()),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPeekShortNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Ldrsh(WRegisterFrom(invoke->GetLocations()->Out()),
           AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeByte(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Strb(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeIntNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Str(WRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeLongNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Str(XRegisterFrom(invoke->GetLocations()->InAt(1)),
         AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMemoryPokeShortNative(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  __ Strh(WRegisterFrom(invoke->GetLocations()->InAt(1)),
          AbsoluteHeapOperandFrom(invoke->GetLocations()->InAt(0), 0));
}

void IntrinsicLocationsBuilderARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitThreadCurrentThread(HInvoke* invoke) {
  codegen_->Load(Primitive::kPrimNot, WRegisterFrom(invoke->GetLocations()->Out()),
                 MemOperand(tr, Thread::PeerOffset<kArm64PointerSize>().Int32Value()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM64* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  MacroAssembler* masm = codegen->GetVIXLAssembler();
  Location base_loc = locations->InAt(1);
  Register base = WRegisterFrom(base_loc);      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = XRegisterFrom(offset_loc);  // Long offset.
  Location trg_loc = locations->Out();
  Register trg = RegisterFrom(trg_loc, type);

  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // UnsafeGetObject/UnsafeGetObjectVolatile with Baker's read barrier case.
    UseScratchRegisterScope temps(masm);
    Register temp = temps.AcquireW();
    codegen->GenerateReferenceLoadWithBakerReadBarrier(invoke,
                                                       trg_loc,
                                                       base,
                                                       /* offset */ 0U,
                                                       /* index */ offset_loc,
                                                       /* scale_factor */ 0U,
                                                       temp,
                                                       /* needs_null_check */ false,
                                                       is_volatile);
  } else {
    // Other cases.
    MemOperand mem_op(base.X(), offset);
    if (is_volatile) {
      codegen->LoadAcquire(invoke, trg, mem_op, /* needs_null_check */ true);
    } else {
      codegen->Load(type, trg, mem_op);
    }

    if (type == Primitive::kPrimNot) {
      DCHECK(trg.IsW());
      codegen->MaybeGenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
    }
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM64* codegen) {
  MacroAssembler* masm = codegen->GetVIXLAssembler();

  Register base = WRegisterFrom(locations->InAt(1));    // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));  // Long offset.
  Register value = RegisterFrom(locations->InAt(3), type);
  Register source = value;
  MemOperand mem_op(base.X(), offset);

  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);

    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      DCHECK(value.IsW());
      Register temp = temps.AcquireW();
      __ Mov(temp.W(), value.W());
      codegen->GetAssembler()->PoisonHeapReference(temp.W());
      source = temp;
    }

    if (is_volatile || is_ordered) {
      codegen->StoreRelease(type, source, mem_op);
    } else {
      codegen->Store(type, source, mem_op);
    }
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM64* codegen) {
  MacroAssembler* masm = codegen->GetVIXLAssembler();

  Register out = WRegisterFrom(locations->Out());              // Boolean result.

  Register base = WRegisterFrom(locations->InAt(1));           // Object pointer.
  Register offset = XRegisterFrom(locations->InAt(2));         // Long offset.
  Register expected = RegisterFrom(locations->InAt(3), type);  // Expected.
  Register value = RegisterFrom(locations->InAt(4), type);     // Value.

  // This needs to be before the temp registers, as MarkGCCard also uses VIXL temps.
  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(base, value, value_can_be_null);
  }

  UseScratchRegisterScope temps(masm);
  Register tmp_ptr = temps.AcquireX();                         // Pointer to actual memory.
  Register tmp_value = temps.AcquireSameSizeAs(value);         // Value in memory.

  Register tmp_32 = tmp_value.W();

  __ Add(tmp_ptr, base.X(), Operand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected);
    if (value.Is(expected)) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp_value = [tmp_ptr] - expected;
  // } while (tmp_value == 0 && failure([tmp_ptr] <- r_new_value));
  // result = tmp_value != 0;
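  //
  // This is a standard load-linked/store-conditional retry loop: Ldaxr is an exclusive
  // load-acquire and Stlxr an exclusive store-release, so the CAS also provides
  // acquire/release ordering.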
1075
Scott Wakeling97c72b72016-06-24 16:19:36 +01001076 vixl::aarch64::Label loop_head, exit_loop;
Serban Constantinescu4a6a67c2016-01-27 09:19:56 +00001077 __ Bind(&loop_head);
1078 // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
1079 // the reference stored in the object before attempting the CAS,
1080 // similar to the one in the art::Unsafe_compareAndSwapObject JNI
1081 // implementation.
1082 //
1083 // Note that this code is not (yet) used when read barriers are
1084 // enabled (see IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject).
1085 DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
1086 __ Ldaxr(tmp_value, MemOperand(tmp_ptr));
1087 __ Cmp(tmp_value, expected);
1088 __ B(&exit_loop, ne);
1089 __ Stlxr(tmp_32, value, MemOperand(tmp_ptr));
1090 __ Cbnz(tmp_32, &loop_head);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001091 __ Bind(&exit_loop);
1092 __ Cset(out, eq);
Roland Levillain4d027112015-07-01 15:41:14 +01001093
1094 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
Roland Levillain4d027112015-07-01 15:41:14 +01001095 codegen->GetAssembler()->UnpoisonHeapReference(expected);
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001096 if (value.Is(expected)) {
1097 // Do not unpoison `value`, as it is the same register as
1098 // `expected`, which has just been unpoisoned.
1099 } else {
1100 codegen->GetAssembler()->UnpoisonHeapReference(value);
1101 }
Roland Levillain4d027112015-07-01 15:41:14 +01001102 }
Andreas Gampe878d58c2015-01-15 23:24:00 -08001103}
1104
1105void IntrinsicLocationsBuilderARM64::VisitUnsafeCASInt(HInvoke* invoke) {
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001106 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimInt);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001107}
1108void IntrinsicLocationsBuilderARM64::VisitUnsafeCASLong(HInvoke* invoke) {
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001109 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimLong);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001110}
1111void IntrinsicLocationsBuilderARM64::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain391b8662015-12-18 11:43:38 +00001112 // The UnsafeCASObject intrinsic is missing a read barrier, and
1113 // therefore sometimes does not work as expected (b/25883050).
1114 // Turn it off temporarily as a quick fix, until the read barrier is
Roland Levillain3d312422016-06-23 13:53:42 +01001115 // implemented (see TODO in GenCAS).
Roland Levillain391b8662015-12-18 11:43:38 +00001116 //
Roland Levillain3d312422016-06-23 13:53:42 +01001117 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
1118 // this intrinsic.
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001119 if (kEmitCompilerReadBarrier) {
Roland Levillain985ff702015-10-23 13:25:35 +01001120 return;
1121 }
1122
Roland Levillain2e50ecb2016-01-27 14:08:33 +00001123 CreateIntIntIntIntIntToInt(arena_, invoke, Primitive::kPrimNot);
Andreas Gampe878d58c2015-01-15 23:24:00 -08001124}
1125
1126void IntrinsicCodeGeneratorARM64::VisitUnsafeCASInt(HInvoke* invoke) {
1127 GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
1128}
1129void IntrinsicCodeGeneratorARM64::VisitUnsafeCASLong(HInvoke* invoke) {
1130 GenCas(invoke->GetLocations(), Primitive::kPrimLong, codegen_);
1131}
1132void IntrinsicCodeGeneratorARM64::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain3d312422016-06-23 13:53:42 +01001133 // The UnsafeCASObject intrinsic is missing a read barrier, and
1134 // therefore sometimes does not work as expected (b/25883050).
1135 // Turn it off temporarily as a quick fix, until the read barrier is
1136 // implemented (see TODO in GenCAS).
1137 //
1138 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
1139 // this intrinsic.
1140 DCHECK(!kEmitCompilerReadBarrier);
1141
Andreas Gampe878d58c2015-01-15 23:24:00 -08001142 GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
1143}
1144
void IntrinsicLocationsBuilderARM64::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringCompareTo(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = InputRegisterAt(invoke, 0);
  Register arg = InputRegisterAt(invoke, 1);
  DCHECK(str.IsW());
  DCHECK(arg.IsW());
  Register out = OutputRegister(invoke);

  Register temp0 = WRegisterFrom(locations->GetTemp(0));
  Register temp1 = WRegisterFrom(locations->GetTemp(1));
  Register temp2 = WRegisterFrom(locations->GetTemp(2));

  vixl::aarch64::Label loop;
  vixl::aarch64::Label find_char_diff;
  vixl::aarch64::Label end;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take the slow path and throw if the argument can be null and is null.
  SlowPathCodeARM64* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
    codegen_->AddSlowPath(slow_path);
    __ Cbz(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check: return 0 if `str` and `arg` are the same reference.
  __ Subs(out, str, arg);
  __ B(&end, eq);
  // Load lengths of this and argument strings.
  __ Ldr(temp0, HeapOperand(str, count_offset));
  __ Ldr(temp1, HeapOperand(arg, count_offset));
  // Return zero if both strings are empty.
  __ Orr(out, temp0, temp1);
  __ Cbz(out, &end);
  // out = length diff.
  __ Subs(out, temp0, temp1);
  // temp2 = min(len(str), len(arg)).
  __ Csel(temp2, temp1, temp0, ge);
  // Shorter string is empty?
  __ Cbz(temp2, &end);

  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);

  UseScratchRegisterScope scratch_scope(masm);
  Register temp4 = scratch_scope.AcquireX();

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Promote temp0 to an X reg, ready for LDR.
  temp0 = temp0.X();

  // Loop to compare 4x16-bit characters at a time (ok because of string data alignment).
  __ Bind(&loop);
  __ Ldr(temp4, MemOperand(str.X(), temp1.X()));
  __ Ldr(temp0, MemOperand(arg.X(), temp1.X()));
  __ Cmp(temp4, temp0);
  __ B(ne, &find_char_diff);
  __ Add(temp1, temp1, char_size * 4);
  __ Subs(temp2, temp2, 4);
  __ B(gt, &loop);
  __ B(&end);

  // Promote temp1 to an X reg, ready for EOR.
  temp1 = temp1.X();

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ Eor(temp1, temp0, temp4);
  __ Rbit(temp1, temp1);
  __ Clz(temp1, temp1);
  // If the number of 16-bit chars remaining <= the index where the difference occurs (0-3), then
  // the difference occurs outside the remaining string data, so just return length diff (out).
  __ Cmp(temp2, Operand(temp1.W(), LSR, 4));
  __ B(le, &end);
  // Extract the characters and calculate the difference.
  __ Bic(temp1, temp1, 0xf);
  __ Lsr(temp0, temp0, temp1);
  __ Lsr(temp4, temp4, temp1);
  __ And(temp4, temp4, 0xffff);
  __ Sub(out, temp4.W(), Operand(temp0.W(), UXTH));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}
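
// Worked example of the find_char_diff sequence above (illustrative values,
// not generated code). Characters are little-endian within each 64-bit
// chunk, so char index i occupies bits [16*i + 15 : 16*i]. Suppose the two
// chunks first differ at char index 2, in bit 36:
//
//   Eor temp1, temp0, temp4  // only differing bits survive; lowest is bit 36
//   Rbit + Clz               // position of lowest set bit: temp1 = 36
//   temp1 LSR #4 = 2         // index of the first differing char
//   Bic temp1, temp1, 0xf    // round down to 32: the shift that brings that
//                            // char into bits [15:0] for the final Sub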

void IntrinsicLocationsBuilderARM64::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM64::VisitStringEquals(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = WRegisterFrom(locations->InAt(0));
  Register arg = WRegisterFrom(locations->InAt(1));
  Register out = XRegisterFrom(locations->Out());

  UseScratchRegisterScope scratch_scope(masm);
  Register temp = scratch_scope.AcquireW();
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  vixl::aarch64::Label loop;
  vixl::aarch64::Label end;
  vixl::aarch64::Label return_true;
  vixl::aarch64::Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  const int32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if the input is null; return false if it is.
    __ Cbz(arg, &return_false);
  }

  // Reference equality check: return true if `str` and `arg` are the same reference.
  __ Cmp(str, arg);
  __ B(&return_true, eq);

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ Ldr(temp, MemOperand(str.X(), class_offset));
    __ Ldr(temp1, MemOperand(arg.X(), class_offset));
    __ Cmp(temp, temp1);
    __ B(&return_false, ne);
  }

  // Load lengths of this and argument strings.
  __ Ldr(temp, MemOperand(str.X(), count_offset));
  __ Ldr(temp1, MemOperand(arg.X(), count_offset));
  // Check if lengths are equal; return false if they're not.
  __ Cmp(temp, temp1);
  __ B(&return_false, ne);
  // Store offset of string value in preparation for comparison loop.
  __ Mov(temp1, value_offset);
  // Return true if both strings are empty.
  __ Cbz(temp, &return_true);

  // Assertions that must hold in order to compare strings 4 characters at a time.
  DCHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment), "String of odd length is not zero padded");

  temp1 = temp1.X();
  temp2 = temp2.X();

  // Loop to compare strings 4 characters at a time starting at the beginning of the string.
  // OK to do this because strings are zero-padded to be 8-byte aligned.
  __ Bind(&loop);
  __ Ldr(out, MemOperand(str.X(), temp1));
  __ Ldr(temp2, MemOperand(arg.X(), temp1));
  __ Add(temp1, temp1, Operand(sizeof(uint64_t)));
  __ Cmp(out, temp2);
  __ B(&return_false, ne);
  __ Sub(temp, temp, Operand(4), SetFlags);
  __ B(&loop, gt);

  // Return true and exit the function.
  // If the loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ Mov(out, 1);
  __ B(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ Mov(out, 0);
  __ Bind(&end);
}
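
// High-level sketch of the sequence above (not generated code):
//
//   if (arg == null) return false;             // unless proven non-null
//   if (str == arg) return true;               // same reference
//   if (str.klass != arg.klass) return false;  // String is final, so this
//                                              // doubles as instanceof
//   if (str.count != arg.count) return false;
//   compare 4 chars (8 bytes) per iteration; any mismatch returns false.
//
// Reading 8 bytes at a time can look past the last char of an odd-length
// string, but as asserted above strings are zero-padded out to the 8-byte
// object alignment, so the padding compares equal on both sides.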

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       MacroAssembler* masm,
                                       CodeGeneratorARM64* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCodeARM64* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) > 0xFFFFU) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
      codegen->AddSlowPath(slow_path);
      __ B(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = WRegisterFrom(locations->InAt(1));
    __ Tst(char_reg, 0xFFFF0000);
    slow_path = new (allocator) IntrinsicSlowPathARM64(invoke);
    codegen->AddSlowPath(slow_path);
    __ B(ne, slow_path->GetEntryLabel());
  }

  if (start_at_zero) {
    // Start-index = 0.
    Register tmp_reg = WRegisterFrom(locations->GetTemp(0));
    __ Mov(tmp_reg, 0);
  }

  __ Ldr(lr, MemOperand(tr, QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, pIndexOf).Int32Value()));
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ Blr(lr);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
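
// The Tst against 0xFFFF0000 above is a BMP check: String.indexOf(int) must
// handle supplementary code points (> U+FFFF) in the runtime slow path,
// while a char-typed argument can never exceed 0xFFFF and needs no check.
// Constants are resolved statically: a large constant branches straight to
// the slow path, a small one skips the check entirely.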

void IntrinsicLocationsBuilderARM64::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));

  // Need to send start_index=0.
  locations->AddTemp(LocationFrom(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimInt));
}

void IntrinsicCodeGeneratorARM64::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, LocationFrom(calling_convention.GetRegisterAt(3)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromBytes(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = WRegisterFrom(locations->InAt(0));
  __ Cmp(byte_array, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr,
                 QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, pAllocStringFromBytes).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, LocationFrom(calling_convention.GetRegisterAt(2)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromChars(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ Ldr(lr,
      MemOperand(tr,
                 QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, pAllocStringFromChars).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}

void IntrinsicCodeGeneratorARM64::VisitStringNewStringFromString(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = WRegisterFrom(locations->InAt(0));
  __ Cmp(string_to_copy, 0);
  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);
  __ B(eq, slow_path->GetEntryLabel());

  __ Ldr(lr,
      MemOperand(tr,
                 QUICK_ENTRYPOINT_OFFSET(kArm64PointerSize, pAllocStringFromString).Int32Value()));
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ Blr(lr);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}
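
// Note the shared shape of the three factories above: null-check the one
// reference argument that may be null (byte_array / string_to_copy), load
// the allocation entrypoint from the Thread* in `tr`, call it with Blr, and
// RecordPcInfo so the runtime has a stack map at the call site.
// newStringFromChars skips the null check for the reason documented in its
// body.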

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCallOnMainOnly,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(0)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->InputAt(1)->GetType()));
  DCHECK(Primitive::IsFloatingPointType(invoke->GetType()));

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCallOnMainOnly,
                                                                 kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, LocationFrom(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, LocationFrom(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(calling_convention.GetReturnLocation(invoke->GetType()));
}

static void GenFPToFPCall(HInvoke* invoke,
                          MacroAssembler* masm,
                          CodeGeneratorARM64* codegen,
                          QuickEntrypointEnum entry) {
  __ Ldr(lr, MemOperand(tr, GetThreadOffset<kArm64PointerSize>(entry).Int32Value()));
  __ Blr(lr);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}
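
// GenFPToFPCall dispatches through the thread-local entrypoint table: `tr`
// holds the current Thread*, Ldr fetches the target at the entry's fixed
// offset, and Blr calls it. A pseudo-C++ sketch of the runtime behavior
// (names are illustrative, not real ART API):
//
//   using UnaryMathFn = double (*)(double);
//   auto fn = *reinterpret_cast<UnaryMathFn*>(
//       reinterpret_cast<uintptr_t>(self) + entrypoint_offset);
//   d0 = fn(d0);  // argument and result stay in d0 per the FP convention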

void IntrinsicLocationsBuilderARM64::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM64::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM64::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM64::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM64::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM64::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM64::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM64::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM64::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM64::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM64::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM64::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM64::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM64::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetVIXLAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = XRegisterFrom(locations->InAt(0));
  Register srcBegin = XRegisterFrom(locations->InAt(1));
  Register srcEnd = XRegisterFrom(locations->InAt(2));
  Register dstObj = XRegisterFrom(locations->InAt(3));
  Register dstBegin = XRegisterFrom(locations->InAt(4));

  Register src_ptr = XRegisterFrom(locations->GetTemp(0));
  Register num_chr = XRegisterFrom(locations->GetTemp(1));
  Register tmp1 = XRegisterFrom(locations->GetTemp(2));

  UseScratchRegisterScope temps(masm);
  Register dst_ptr = temps.AcquireX();
  Register tmp2 = temps.AcquireX();

  // Source address to copy from.
  __ Add(src_ptr, srcObj, Operand(value_offset));
  __ Add(src_ptr, src_ptr, Operand(srcBegin, LSL, 1));

  // Destination address to copy to.
  __ Add(dst_ptr, dstObj, Operand(data_offset));
  __ Add(dst_ptr, dst_ptr, Operand(dstBegin, LSL, 1));

  __ Sub(num_chr, srcEnd, srcBegin);

  // Do the copy.
  vixl::aarch64::Label loop;
  vixl::aarch64::Label done;
  vixl::aarch64::Label remainder;

  // Early out for valid zero-length retrievals.
  __ Cbz(num_chr, &done);

  // Subtract into a temp so we avoid having to repair num_chr on the < 8 character path.
  __ Subs(tmp1, num_chr, 8);
  __ B(lt, &remainder);

  // Keep the result of the earlier Subs; we are going to fetch at least 8 characters.
  __ Mov(num_chr, tmp1);

  // Main loop used for longer fetches loads and stores 8x16-bit characters at a time.
  // (Unaligned addresses are acceptable here and not worth inlining extra code to rectify.)
  __ Bind(&loop);
  __ Ldp(tmp1, tmp2, MemOperand(src_ptr, char_size * 8, PostIndex));
  __ Subs(num_chr, num_chr, 8);
  __ Stp(tmp1, tmp2, MemOperand(dst_ptr, char_size * 8, PostIndex));
  __ B(ge, &loop);

  __ Adds(num_chr, num_chr, 8);
  __ B(eq, &done);

  // Main loop for < 8 character case and remainder handling. Loads and stores one
  // 16-bit Java character at a time.
  __ Bind(&remainder);
  __ Ldrh(tmp1, MemOperand(src_ptr, char_size, PostIndex));
  __ Subs(num_chr, num_chr, 1);
  __ Strh(tmp1, MemOperand(dst_ptr, char_size, PostIndex));
  __ B(gt, &remainder);

  __ Bind(&done);
}
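
// Copy-loop structure above, as a sketch: with n = srcEnd - srcBegin,
//
//   if (n == 0) return;
//   while (n >= 8) { copy 16 bytes via Ldp/Stp; n -= 8; }    // main loop
//   while (n > 0)  { copy one char via Ldrh/Strh; n -= 1; }  // remainder
//
// The Subs/Adds forms keep num_chr as the counter and reuse the flags for
// the loop branches, avoiding separate compares.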

// Mirrors ARRAYCOPY_SHORT_CHAR_ARRAY_THRESHOLD in libcore, so we can choose to use the native
// implementation there for longer copy lengths.
static constexpr int32_t kSystemArrayCopyCharThreshold = 32;

static void SetSystemArrayCopyLocationRequires(LocationSummary* locations,
                                               uint32_t at,
                                               HInstruction* input) {
  HIntConstant* const_input = input->AsIntConstant();
  if (const_input != nullptr && !vixl::aarch64::Assembler::IsImmAddSub(const_input->GetValue())) {
    locations->SetInAt(at, Location::RequiresRegister());
  } else {
    locations->SetInAt(at, Location::RegisterOrConstant(input));
  }
}

void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dst_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dst_pos != nullptr && dst_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0 and not so long that we would (currently) prefer libcore's
  // native implementation.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len > kSystemArrayCopyCharThreshold) {
      // Just call as normal.
      return;
    }
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(char[] src, int src_pos, char[] dst, int dst_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

static void CheckSystemArrayCopyPosition(MacroAssembler* masm,
                                         const Location& pos,
                                         const Register& input,
                                         const Location& length,
                                         SlowPathCodeARM64* slow_path,
                                         const Register& temp,
                                         bool length_is_input_length = false) {
  const int32_t length_offset = mirror::Array::LengthOffset().Int32Value();
  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ Ldr(temp, MemOperand(input, length_offset));
        __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
        __ B(slow_path->GetEntryLabel(), lt);
      }
    } else {
      // Check that length(input) >= pos.
      __ Ldr(temp, MemOperand(input, length_offset));
      __ Subs(temp, temp, pos_const);
      __ B(slow_path->GetEntryLabel(), lt);

      // Check that (length(input) - pos) >= length.
      __ Cmp(temp, OperandFrom(length, Primitive::kPrimInt));
      __ B(slow_path->GetEntryLabel(), lt);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    __ Cbnz(WRegisterFrom(pos), slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = WRegisterFrom(pos);
    __ Tbnz(pos_reg, pos_reg.GetSizeInBits() - 1, slow_path->GetEntryLabel());

    // Check that pos <= length(input) && (length(input) - pos) >= length.
    __ Ldr(temp, MemOperand(input, length_offset));
    __ Subs(temp, temp, pos_reg);
    // Ccmp if length(input) >= pos, else definitely bail to slow path (N!=V == lt).
    __ Ccmp(temp, OperandFrom(length, Primitive::kPrimInt), NFlag, ge);
    __ B(slow_path->GetEntryLabel(), lt);
  }
}
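
// The Subs/Ccmp pair above folds two bounds checks into a single branch:
// after Subs, temp = length(input) - pos. If that subtraction did not go
// negative (ge), Ccmp compares temp against the requested copy length;
// otherwise it forces the flags to NFlag so the following lt branch is
// taken. Net effect, as a sketch:
//
//   if (pos < 0 || pos > length(input) || length(input) - pos < length) {
//     goto slow_path;
//   }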

// Compute base source address, base destination address, and end source address
// for System.arraycopy* intrinsics.
static void GenSystemArrayCopyAddresses(MacroAssembler* masm,
                                        Primitive::Type type,
                                        const Register& src,
                                        const Location& src_pos,
                                        const Register& dst,
                                        const Location& dst_pos,
                                        const Location& copy_length,
                                        const Register& src_base,
                                        const Register& dst_base,
                                        const Register& src_end) {
  DCHECK(type == Primitive::kPrimNot || type == Primitive::kPrimChar)
      << "Unexpected element type: " << type;
  const int32_t element_size = Primitive::ComponentSize(type);
  const int32_t element_size_shift = Primitive::ComponentSizeShift(type);

  uint32_t data_offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_base, src, element_size * constant + data_offset);
  } else {
    __ Add(src_base, src, data_offset);
    __ Add(src_base, src_base, Operand(XRegisterFrom(src_pos), LSL, element_size_shift));
  }

  if (dst_pos.IsConstant()) {
    int32_t constant = dst_pos.GetConstant()->AsIntConstant()->GetValue();
    __ Add(dst_base, dst, element_size * constant + data_offset);
  } else {
    __ Add(dst_base, dst, data_offset);
    __ Add(dst_base, dst_base, Operand(XRegisterFrom(dst_pos), LSL, element_size_shift));
  }

  if (copy_length.IsConstant()) {
    int32_t constant = copy_length.GetConstant()->AsIntConstant()->GetValue();
    __ Add(src_end, src_base, element_size * constant);
  } else {
    __ Add(src_end, src_base, Operand(XRegisterFrom(copy_length), LSL, element_size_shift));
  }
}
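
// Example of the address computation above (illustrative numbers): for a
// char copy with data_offset = D, constant src_pos = 3 and length = 5,
//
//   src_base = src + D + 3 * 2  // first source element
//   src_end  = src_base + 5 * 2 // one past the last source element
//
// Constant positions fold the scaling into the immediate; register
// positions use an LSL by element_size_shift instead.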

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopyChar(HInvoke* invoke) {
  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();
  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dst = XRegisterFrom(locations->InAt(2));
  Location dst_pos = locations->InAt(3);
  Location length = locations->InAt(4);

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  // If source and destination are the same, take the slow path. Overlapping copy regions must be
  // copied in reverse and we can't know in all cases if it's needed.
  __ Cmp(src, dst);
  __ B(slow_path->GetEntryLabel(), eq);

  // Bail out if the source is null.
  __ Cbz(src, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ Cbz(dst, slow_path->GetEntryLabel());

  if (!length.IsConstant()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length > 32 then (currently) prefer libcore's native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyCharThreshold);
    __ B(slow_path->GetEntryLabel(), gt);
  } else {
    // We have already checked in the LocationsBuilder for the constant case.
    DCHECK_GE(length.GetConstant()->AsIntConstant()->GetValue(), 0);
    DCHECK_LE(length.GetConstant()->AsIntConstant()->GetValue(), 32);
  }

  Register src_curr_addr = WRegisterFrom(locations->GetTemp(0));
  Register dst_curr_addr = WRegisterFrom(locations->GetTemp(1));
  Register src_stop_addr = WRegisterFrom(locations->GetTemp(2));

  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               src_curr_addr,
                               false);

  CheckSystemArrayCopyPosition(masm,
                               dst_pos,
                               dst,
                               length,
                               slow_path,
                               src_curr_addr,
                               false);

  src_curr_addr = src_curr_addr.X();
  dst_curr_addr = dst_curr_addr.X();
  src_stop_addr = src_stop_addr.X();

  GenSystemArrayCopyAddresses(masm,
                              Primitive::kPrimChar,
                              src,
                              src_pos,
                              dst,
                              dst_pos,
                              length,
                              src_curr_addr,
                              dst_curr_addr,
                              src_stop_addr);

  // Iterate over the arrays and do a raw copy of the chars.
  const int32_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  UseScratchRegisterScope temps(masm);
  Register tmp = temps.AcquireW();
  vixl::aarch64::Label loop, done;
  __ Bind(&loop);
  __ Cmp(src_curr_addr, src_stop_addr);
  __ B(&done, eq);
  __ Ldrh(tmp, MemOperand(src_curr_addr, char_size, PostIndex));
  __ Strh(tmp, MemOperand(dst_curr_addr, char_size, PostIndex));
  __ B(&loop);
  __ Bind(&done);

  __ Bind(slow_path->GetExitLabel());
}
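
// The fast path above reduces to a simple char-at-a-time copy; a sketch of
// its runtime behavior:
//
//   for (p = src_curr_addr, q = dst_curr_addr; p != src_stop_addr;
//        p += 2, q += 2) {
//     *q = *p;  // Ldrh / Strh with post-index addressing
//   }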

// We can choose to use libcore's native implementation for longer copy lengths.
static constexpr int32_t kSystemArrayCopyThreshold = 128;

// CodeGenerator::CreateSystemArrayCopyLocationSummary uses three temporary registers.
// We want to use two temporary registers in order to reduce the register pressure on arm64.
// So we don't use CodeGenerator::CreateSystemArrayCopyLocationSummary.
void IntrinsicLocationsBuilderARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  // TODO(rpl): Implement read barriers in the SystemArrayCopy
  // intrinsic and re-enable it (b/29516905).
  if (kEmitCompilerReadBarrier) {
    return;
  }

  // Check to see if we have known failures that will cause us to have to bail out
  // to the runtime, and just generate the runtime call directly.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();

  // The positions must be non-negative.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // The length must be >= 0.
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0 || len >= kSystemArrayCopyThreshold) {
      // Just call as normal.
      return;
    }
  }

  SystemArrayCopyOptimizations optimizations(invoke);

  if (optimizations.GetDestinationIsSource()) {
    if (src_pos != nullptr && dest_pos != nullptr && src_pos->GetValue() < dest_pos->GetValue()) {
      // We only support backward copying if source and destination are the same.
      return;
    }
  }

  if (optimizations.GetDestinationIsPrimitiveArray() || optimizations.GetSourceIsPrimitiveArray()) {
    // We currently don't intrinsify primitive copying.
    return;
  }

  ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetArena();
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // arraycopy(Object src, int src_pos, Object dest, int dest_pos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 1, invoke->InputAt(1));
  locations->SetInAt(2, Location::RequiresRegister());
  SetSystemArrayCopyLocationRequires(locations, 3, invoke->InputAt(3));
  SetSystemArrayCopyLocationRequires(locations, 4, invoke->InputAt(4));

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM64::VisitSystemArrayCopy(HInvoke* invoke) {
  // TODO(rpl): Implement read barriers in the SystemArrayCopy
  // intrinsic and re-enable it (b/29516905).
  DCHECK(!kEmitCompilerReadBarrier);

  MacroAssembler* masm = GetVIXLAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = XRegisterFrom(locations->InAt(0));
  Location src_pos = locations->InAt(1);
  Register dest = XRegisterFrom(locations->InAt(2));
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = WRegisterFrom(locations->GetTemp(0));
  Register temp2 = WRegisterFrom(locations->GetTemp(1));

  SlowPathCodeARM64* slow_path = new (GetAllocator()) IntrinsicSlowPathARM64(invoke);
  codegen_->AddSlowPath(slow_path);

  vixl::aarch64::Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same, we go to slow path if we need to do
  // forward copying.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ Cmp(src, dest);
        __ B(slow_path->GetEntryLabel(), eq);
      }
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ Cmp(src, dest);
        __ B(&conditions_on_positions_validated, ne);
      }
      __ Cmp(WRegisterFrom(dest_pos), src_pos_constant);
      __ B(slow_path->GetEntryLabel(), gt);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ Cmp(src, dest);
      __ B(&conditions_on_positions_validated, ne);
    }
    __ Cmp(RegisterFrom(src_pos, invoke->InputAt(1)->GetType()),
           OperandFrom(dest_pos, invoke->InputAt(3)->GetType()));
    __ B(slow_path->GetEntryLabel(), lt);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ Cbz(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ Cbz(dest, slow_path->GetEntryLabel());
  }

  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    // If the length is negative, bail out.
    __ Tbnz(WRegisterFrom(length), kWRegSize - 1, slow_path->GetEntryLabel());
    // If the length >= 128 then (currently) prefer native implementation.
    __ Cmp(WRegisterFrom(length), kSystemArrayCopyThreshold);
    __ B(slow_path->GetEntryLabel(), ge);
  }
  // Validity checks: source.
  CheckSystemArrayCopyPosition(masm,
                               src_pos,
                               src,
                               length,
                               slow_path,
                               temp1,
                               optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckSystemArrayCopyPosition(masm,
                               dest_pos,
                               dest,
                               length,
                               slow_path,
                               temp1,
                               optimizations.GetCountIsDestinationLength());
  {
    // We use a block to end the scratch scope before the write barrier, thus
    // freeing the temporary registers so they can be used in `MarkGCCard`.
    UseScratchRegisterScope temps(masm);
    Register temp3 = temps.AcquireW();
    if (!optimizations.GetDoesNotNeedTypeCheck()) {
      // Check whether all elements of the source array are assignable to the component
      // type of the destination array. We do two checks: the classes are the same,
      // or the destination is Object[]. If none of these checks succeed, we go to the
      // slow path.
      __ Ldr(temp1, MemOperand(dest, class_offset));
      __ Ldr(temp2, MemOperand(src, class_offset));
      bool did_unpoison = false;
      if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
          !optimizations.GetSourceIsNonPrimitiveArray()) {
        // One or two of the references need to be unpoisoned. Unpoison them
        // both to make the identity check valid.
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp2);
        did_unpoison = true;
      }

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp1->component_type_
        __ Ldr(temp3, HeapOperand(temp1, component_offset));
        __ Cbz(temp3, slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
        __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ Cbnz(temp3, slow_path->GetEntryLabel());
      }

      if (!optimizations.GetSourceIsNonPrimitiveArray()) {
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp2->component_type_
        __ Ldr(temp3, HeapOperand(temp2, component_offset));
        __ Cbz(temp3, slow_path->GetEntryLabel());
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
        __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ Cbnz(temp3, slow_path->GetEntryLabel());
      }

      __ Cmp(temp1, temp2);

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        vixl::aarch64::Label do_copy;
        __ B(&do_copy, eq);
        if (!did_unpoison) {
          codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        }
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ Ldr(temp1, HeapOperand(temp1, component_offset));
        codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp1 = temp1->super_class_
        __ Ldr(temp1, HeapOperand(temp1, super_offset));
        // No need to unpoison the result; we are comparing against null.
        __ Cbnz(temp1, slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ B(slow_path->GetEntryLabel(), ne);
      }
    } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp1 = src->klass_
      __ Ldr(temp1, HeapOperand(src.W(), class_offset));
      codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ Ldr(temp3, HeapOperand(temp1, component_offset));
      __ Cbz(temp3, slow_path->GetEntryLabel());
      codegen_->GetAssembler()->MaybeUnpoisonHeapReference(temp3);
      __ Ldrh(temp3, HeapOperand(temp3, primitive_offset));
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ Cbnz(temp3, slow_path->GetEntryLabel());
    }

    Register src_curr_addr = temp1.X();
    Register dst_curr_addr = temp2.X();
    Register src_stop_addr = temp3.X();

    GenSystemArrayCopyAddresses(masm,
                                Primitive::kPrimNot,
                                src,
                                src_pos,
                                dest,
                                dest_pos,
                                length,
                                src_curr_addr,
                                dst_curr_addr,
                                src_stop_addr);

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
    vixl::aarch64::Label loop, done;
    const int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    __ Bind(&loop);
    __ Cmp(src_curr_addr, src_stop_addr);
    __ B(&done, eq);
    {
      Register tmp = temps.AcquireW();
      __ Ldr(tmp, MemOperand(src_curr_addr, element_size, PostIndex));
      __ Str(tmp, MemOperand(dst_curr_addr, element_size, PostIndex));
    }
    __ B(&loop);
    __ Bind(&done);
  }
  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(dest.W(), Register(), /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}
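
// Summary of the type checks above, as a sketch:
//
//   if (dest.klass == src.klass) proceed;       // same element type
//   else if (dest is Object[]) proceed;         // component super == null
//   else goto slow_path;                        // needs per-element checks
//
// plus, where not statically known, checks that neither array has a
// primitive component type. A single card mark suffices afterwards because
// card marking here is imprecise: dirtying the destination's card is enough
// for the GC to rescan every reference the loop stored.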

static void GenIsInfinite(LocationSummary* locations,
                          bool is64bit,
                          MacroAssembler* masm) {
  Operand infinity;
  Register out;

  if (is64bit) {
    infinity = kPositiveInfinityDouble;
    out = XRegisterFrom(locations->Out());
  } else {
    infinity = kPositiveInfinityFloat;
    out = WRegisterFrom(locations->Out());
  }

  const Register zero = vixl::aarch64::Assembler::AppropriateZeroRegFor(out);

  MoveFPToInt(locations, is64bit, masm);
  __ Eor(out, out, infinity);
  // We don't care about the sign bit, so shift left.
  __ Cmp(zero, Operand(out, LSL, 1));
  __ Cset(out, eq);
}

void IntrinsicLocationsBuilderARM64::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitFloatIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler());
}

void IntrinsicLocationsBuilderARM64::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM64::VisitDoubleIsInfinite(HInvoke* invoke) {
  GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler());
}
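
// The bit trick above, worked through for the 32-bit case: +inf is
// 0x7f800000 and -inf is 0xff800000. After Eor with kPositiveInfinityFloat
// (0x7f800000), +inf becomes 0 and -inf becomes 0x80000000, whose only set
// bit is the sign bit. The LSL #1 in the compare discards that bit, so
// exactly the two infinities compare equal to zero.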

UNIMPLEMENTED_INTRINSIC(ARM64, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM64, LongLowestOneBit)

// Java 1.8.
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM64)

#undef __

}  // namespace arm64
}  // namespace art