/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include <limits>

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

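// Shorthand: "__" routes the pseudo-assembly below through the given code generator's assembler.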
#define __ codegen->GetAssembler()->

static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
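      // The runtime returns the long in R0/R1. Order the moves so neither half of the
      // result pair is clobbered before it has been copied out.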
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
  InvokeDexCallingConventionVisitorARM calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(kArtMethodRegister));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};

#undef __

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

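// From here on, "__" emits through the ArmAssembler pointer passed to each helper.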
#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

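  // Branchless abs: mask = x >> 31 (sign-extended: all ones if negative, zero otherwise),
  // then abs(x) = (x + mask) ^ mask. The 64-bit path does the add with carry across the pair.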
  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

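  // Thumb-2 IT block: predicate the two moves on complementary conditions so exactly
  // one of them executes, avoiding a branch.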
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1, i.e. alignment checking is enabled and
  // unaligned accesses throw a processor exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
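  // If addr aliases the low output register, load the high word first so the address
  // is still intact for the second load.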
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 1, i.e. alignment checking is enabled and
  // unaligned accesses throw a processor exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
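    // Plain ldrd is only guaranteed single-copy atomic on cores that advertise atomic
    // ldrd/strd (e.g. with LPAE); otherwise a volatile 64-bit load must go through ldrexd
    // to read both words atomically.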
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

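  // Acquire semantics for volatile gets: a barrier after the load keeps later accesses
  // from being reordered before it.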
  if (is_volatile) {
    __ dmb(ISH);
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

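  // Both ordered ("lazy") and volatile puts need a release barrier before the store;
  // only volatile puts also need the trailing barrier emitted below.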
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
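      // strexd writes 0 to temp_lo on success and 1 if the exclusive monitor was lost;
      // retry until the store lands.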
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    __ str(value, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, false, codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
  locations->AddTemp(Location::RequiresRegister());  // Temp 2.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

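  // ITT EQ: the strex and cmp below only execute when the loaded value matched
  // the expected one.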
  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

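  // out = (tmp_lo == 0) ? 1 : 0. rsbs computes 1 - tmp_lo: a tmp_lo of 1 yields 0
  // directly, and anything larger borrows (carry clear), so the predicated mov zeroes out.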
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity the index parameter is requested in a register, so, unlike Quick,
  //       we do not optimize the code for constant indices (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
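  // Unsigned comparison: a negative index wraps to a large unsigned value, so a single
  // CS (unsigned >=) branch covers both the below-zero and above-length checks.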
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs are moved into the runtime calling-convention registers; the result
  // comes back in R0.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeARM* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {   \
}                                                                                      \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {      \
}

UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)

}  // namespace arm
}  // namespace art