/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

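// Shorthand for emitting instructions through a local `codegen` variable.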
#define __ codegen->GetAssembler()->

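// Copies the ABI return value (R0, or the R0/R1 pair for longs) into the intrinsic's output
// location. Only void, integral, and reference return types are handled here.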
static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
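      // Order the two moves so that no source register is clobbered before it is read.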
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

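// Forwards the invoke's arguments into the slots mandated by the managed-code calling
// convention, so the slow path below can make a regular call.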
static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slow-path call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(kArtMethodRegister));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

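// Returns whether the invoke was recognized as an intrinsic and given an intrinsified
// location summary.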
bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

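  // Branch-free abs: mask = in >> 31 (all ones iff negative), abs = (in + mask) ^ mask.
  // E.g. in = -5: mask = -1, in + mask = -6, (-6) ^ -1 = 5.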
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}


void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

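  // Select op1 when the condition holds, op2 otherwise. The it() marker makes the two
  // predicated movs valid in Thumb-2; plain ARM encodes the condition in each mov directly.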
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
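  // Load the managed java.lang.Thread peer object out of the native Thread via TR.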
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
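    // If the core does not guarantee single-copy atomic ldrd/strd, a volatile 64-bit load
    // must use ldrexd to read both words atomically.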
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

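      // Atomic 64-bit store via an exclusive-access loop: strexd writes 0 to temp_lo on
      // success and 1 if another observer intervened since the ldrexd, in which case retry.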
      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    __ str(value, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

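  // out = (tmp_lo == 0): rsbs computes 1 - tmp_lo; if the subtraction borrows (tmp_lo > 1),
  // the carry is clear and out is forced to 0.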
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so unlike Quick we do
  //       not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
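  // Unsigned comparison: a negative index wraps to a large unsigned value and also takes the
  // slow path.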
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

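  // Assumed stub ABI per the location setup above: R0 = string, R1 = code point,
  // R2 = start index.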
  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                    \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm
}  // namespace art