blob: cc8ddb62997737ed17397c1640d22d263d68e202 [file] [log] [blame]
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method.h"
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080021#include "code_generator_arm.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
24#include "mirror/array-inl.h"
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080025#include "mirror/string.h"
26#include "thread.h"
27#include "utils/arm/assembler_arm.h"
28
29namespace art {
30
31namespace arm {
32
33ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
34 return codegen_->GetAssembler();
35}
36
37ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
38 return codegen_->GetGraph()->GetArena();
39}
40
41#define __ codegen->GetAssembler()->
42
// Copies the value left in the ARM managed-ABI return register(s) — R0, or the
// R0/R1 pair for longs — into |trg|, the location the intrinsic's caller
// expects the result in. |type| is the return type of the intrinsified method.
static void MoveFromReturnRegister(Location trg, Primitive::Type type, CodeGeneratorARM* codegen) {
  if (!trg.IsValid()) {
    // No destination: only a void call may discard its result.
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  DCHECK_NE(type, Primitive::kPrimVoid);

  if (Primitive::IsIntegralType(type) || type == Primitive::kPrimNot) {
    if (type == Primitive::kPrimLong) {
      Register trg_reg_lo = trg.AsRegisterPairLow<Register>();
      Register trg_reg_hi = trg.AsRegisterPairHigh<Register>();
      Register res_reg_lo = R0;
      Register res_reg_hi = R1;
      // Order the two moves so the first never clobbers a source register the
      // second still needs to read.
      if (trg_reg_lo != res_reg_hi) {
        if (trg_reg_lo != res_reg_lo) {
          __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
          __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        } else {
          // trg low is already R0; the pair must then be exactly R0/R1, so
          // there is nothing to move.
          DCHECK_EQ(trg_reg_lo + 1, trg_reg_hi);
        }
      } else {
        // trg low aliases R1 (the result high word): move the high half first
        // so R1 is consumed before being overwritten.
        __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));
        __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));
      }
    } else {
      Register trg_reg = trg.AsRegister<Register>();
      Register res_reg = R0;
      if (trg_reg != res_reg) {
        __ mov(trg_reg, ShifterOperand(res_reg));
      }
    }
  } else {
    // FP results would come back in VFP registers; no intrinsic slow path
    // needs this yet.
    UNIMPLEMENTED(FATAL) << "Floating-point return.";
  }
}
79
Roland Levillainec525fc2015-04-28 15:50:20 +010080static void MoveArguments(HInvoke* invoke, CodeGeneratorARM* codegen) {
Roland Levillain2d27c8e2015-04-28 15:48:45 +010081 InvokeDexCallingConventionVisitorARM calling_convention_visitor;
Roland Levillainec525fc2015-04-28 15:50:20 +010082 IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080083}
84
// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathARM : public SlowPathCodeARM {
 public:
  explicit IntrinsicSlowPathARM(HInvoke* invoke) : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorARM* codegen = down_cast<CodeGeneratorARM*>(codegen_in);
    __ Bind(GetEntryLabel());

    // Preserve any live caller-save registers across the managed call.
    SaveLiveRegisters(codegen, invoke_->GetLocations());

    // Shuffle operands into the managed calling-convention positions.
    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(kArtMethodRegister));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(),
                                   Location::RegisterLocation(kArtMethodRegister));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathARM"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathARM);
};
132
133#undef __
134
135bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
136 Dispatch(invoke);
137 LocationSummary* res = invoke->GetLocations();
138 return res != nullptr && res->Intrinsified();
139}
140
141#define __ assembler->
142
143static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
144 LocationSummary* locations = new (arena) LocationSummary(invoke,
145 LocationSummary::kNoCall,
146 kIntrinsified);
147 locations->SetInAt(0, Location::RequiresFpuRegister());
148 locations->SetOut(Location::RequiresRegister());
149}
150
151static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
152 LocationSummary* locations = new (arena) LocationSummary(invoke,
153 LocationSummary::kNoCall,
154 kIntrinsified);
155 locations->SetInAt(0, Location::RequiresRegister());
156 locations->SetOut(Location::RequiresFpuRegister());
157}
158
// Emits the raw bit move for Float.floatToRawIntBits / Double.doubleToRawLongBits:
// an S register (or a paired-S-as-D register) copied into a core register (pair).
static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}
170
// Emits the raw bit move for Float.intBitsToFloat / Double.longBitsToDouble:
// a core register (pair) copied into an S (or paired-S-as-D) register.
static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}
182
// Double.doubleToRawLongBits / Double.longBitsToDouble: pure bit moves between
// a D register and a core register pair.
void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());  // 64-bit.
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());  // 64-bit.
}
196
// Float.floatToRawIntBits / Float.intBitsToFloat: pure bit moves between an
// S register and a core register.
void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());  // 32-bit.
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());  // 32-bit.
}
210
211static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
212 LocationSummary* locations = new (arena) LocationSummary(invoke,
213 LocationSummary::kNoCall,
214 kIntrinsified);
215 locations->SetInAt(0, Location::RequiresRegister());
216 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
217}
218
219static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
220 LocationSummary* locations = new (arena) LocationSummary(invoke,
221 LocationSummary::kNoCall,
222 kIntrinsified);
223 locations->SetInAt(0, Location::RequiresFpuRegister());
224 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
225}
226
Scott Wakeling611d3392015-07-10 11:42:06 +0100227static void GenNumberOfLeadingZeros(LocationSummary* locations,
228 Primitive::Type type,
229 ArmAssembler* assembler) {
230 Location in = locations->InAt(0);
231 Register out = locations->Out().AsRegister<Register>();
232
233 DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));
234
235 if (type == Primitive::kPrimLong) {
236 Register in_reg_lo = in.AsRegisterPairLow<Register>();
237 Register in_reg_hi = in.AsRegisterPairHigh<Register>();
238 Label end;
239 __ clz(out, in_reg_hi);
240 __ CompareAndBranchIfNonZero(in_reg_hi, &end);
241 __ clz(out, in_reg_lo);
242 __ AddConstant(out, 32);
243 __ Bind(&end);
244 } else {
245 __ clz(out, in.AsRegister<Register>());
246 }
247}
248
// Integer.numberOfLeadingZeros: a single CLZ, so in/out may share a register.
void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}
256
// Long.numberOfLeadingZeros: the generated code re-reads the high input word
// after the first clz has written the output, so the output must not alias
// the input pair (hence kOutputOverlap instead of CreateIntToIntLocations).
void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
268
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100269static void GenNumberOfTrailingZeros(LocationSummary* locations,
270 Primitive::Type type,
271 ArmAssembler* assembler) {
272 DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));
273
274 Register out = locations->Out().AsRegister<Register>();
275
276 if (type == Primitive::kPrimLong) {
277 Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
278 Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
279 Label end;
280 __ rbit(out, in_reg_lo);
281 __ clz(out, out);
282 __ CompareAndBranchIfNonZero(in_reg_lo, &end);
283 __ rbit(out, in_reg_hi);
284 __ clz(out, out);
285 __ AddConstant(out, 32);
286 __ Bind(&end);
287 } else {
288 Register in = locations->InAt(0).AsRegister<Register>();
289 __ rbit(out, in);
290 __ clz(out, out);
291 }
292}
293
// Integer.numberOfTrailingZeros: rbit+clz only read the input in the first
// instruction, so the output may share the input's register.
void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

// Long.numberOfTrailingZeros re-reads the low input word after writing the
// output, so the output must not alias the input pair (kOutputOverlap).
void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}
317
// Emits code for Integer.rotateRight/rotateLeft. ARM only has a rotate-right
// (ROR), so left rotations are lowered to the equivalent right rotation.
static void GenIntegerRotate(LocationSummary* locations,
                             ArmAssembler* assembler,
                             bool is_left) {
  Register in = locations->InAt(0).AsRegister<Register>();
  Location rhs = locations->InAt(1);
  Register out = locations->Out().AsRegister<Register>();

  if (rhs.IsConstant()) {
    // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
    // so map all rotations to a +ve. equivalent in that range.
    // (e.g. left *or* right by -2 bits == 30 bits in the same direction.)
    uint32_t rot = rhs.GetConstant()->AsIntConstant()->GetValue() & 0x1F;
    if (rot) {
      // Rotate, mapping left rotations to right equivalents if necessary.
      // (e.g. left by 2 bits == right by 30.)
      __ Ror(out, in, is_left ? (0x20 - rot) : rot);
    } else if (out != in) {
      // Rotation by 0: plain copy (skipped when in/out already coincide).
      __ Mov(out, in);
    }
  } else {
    if (is_left) {
      // rotl(x, n) == rotr(x, 32 - n) == rotr(x, -n mod 32). Note this writes
      // |out| before reading |in| again, which is why the rotate-left builder
      // must request kOutputOverlap.
      __ rsb(out, rhs.AsRegister<Register>(), ShifterOperand(0));
      __ Ror(out, in, out);
    } else {
      __ Ror(out, in, rhs.AsRegister<Register>());
    }
  }
}
346
// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
// rotates by swapping input regs (effectively rotating by the first 32-bits of
// a larger rotation) or flipping direction (thus treating larger right/left
// rotations as sub-word sized rotations in the other direction) as appropriate.
static void GenLongRotate(LocationSummary* locations,
                          ArmAssembler* assembler,
                          bool is_left) {
  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Location rhs = locations->InAt(1);
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  if (rhs.IsConstant()) {
    uint32_t rot = rhs.GetConstant()->AsIntConstant()->GetValue();
    // Map all left rotations to right equivalents.
    if (is_left) {
      rot = 0x40 - rot;
    }
    // Map all rotations to +ve. equivalents on the interval [0,63].
    rot &= 0x3F;
    // For rotates over a word in size, 'pre-rotate' by 32-bits to keep rotate
    // logic below to a simple pair of binary orr.
    // (e.g. 34 bits == in_reg swap + 2 bits right.)
    if (rot >= 0x20) {
      rot -= 0x20;
      std::swap(in_reg_hi, in_reg_lo);
    }
    // Rotate, or mov to out for zero or word size rotations.
    if (rot) {
      // Each output word combines the shifted-down bits of one input word
      // with the bits shifted out of the other.
      __ Lsr(out_reg_hi, in_reg_hi, rot);
      __ orr(out_reg_hi, out_reg_hi, ShifterOperand(in_reg_lo, arm::LSL, 0x20 - rot));
      __ Lsr(out_reg_lo, in_reg_lo, rot);
      __ orr(out_reg_lo, out_reg_lo, ShifterOperand(in_reg_hi, arm::LSL, 0x20 - rot));
    } else {
      __ Mov(out_reg_lo, in_reg_lo);
      __ Mov(out_reg_hi, in_reg_hi);
    }
  } else {
    // Register rotation amount: two scratch registers hold the left and
    // right sub-word shift amounts.
    Register shift_left = locations->GetTemp(0).AsRegister<Register>();
    Register shift_right = locations->GetTemp(1).AsRegister<Register>();
    Label end;
    Label right;

    __ and_(shift_left, rhs.AsRegister<Register>(), ShifterOperand(0x1F));
    // LSR #6 moves bit 5 of rhs ("rotate by >= 32 bits") into the carry flag.
    __ Lsrs(shift_right, rhs.AsRegister<Register>(), 6);
    // kCcKeep preserves that carry for the conditional branches below.
    __ rsb(shift_right, shift_left, ShifterOperand(0x20), AL, kCcKeep);

    if (is_left) {
      __ b(&right, CS);
    } else {
      __ b(&right, CC);
      // A sub-word right rotation reuses the "left" sequence below with the
      // two shift amounts swapped.
      std::swap(shift_left, shift_right);
    }

    // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
    // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
    __ Lsl(out_reg_hi, in_reg_hi, shift_left);
    __ Lsr(out_reg_lo, in_reg_lo, shift_right);
    __ add(out_reg_hi, out_reg_hi, ShifterOperand(out_reg_lo));
    __ Lsl(out_reg_lo, in_reg_lo, shift_left);
    // shift_left is dead from here on and doubles as a scratch register.
    __ Lsr(shift_left, in_reg_hi, shift_right);
    __ add(out_reg_lo, out_reg_lo, ShifterOperand(shift_left));
    __ b(&end);

    // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
    // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
    __ Bind(&right);
    __ Lsr(out_reg_hi, in_reg_hi, shift_right);
    __ Lsl(out_reg_lo, in_reg_lo, shift_left);
    __ add(out_reg_hi, out_reg_hi, ShifterOperand(out_reg_lo));
    __ Lsr(out_reg_lo, in_reg_lo, shift_right);
    // shift_right is dead from here on and doubles as a scratch register.
    __ Lsl(shift_right, in_reg_hi, shift_left);
    __ add(out_reg_lo, out_reg_lo, ShifterOperand(shift_right));

    __ Bind(&end);
  }
}
425
void IntrinsicLocationsBuilderARM::VisitIntegerRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerRotateRight(HInvoke* invoke) {
  GenIntegerRotate(invoke->GetLocations(), GetAssembler(), false /* is_left */);
}

// Long rotates need kOutputOverlap (inputs are re-read after parts of the
// output are written); the two temps are only used on the register-amount
// path of GenLongRotate.
void IntrinsicLocationsBuilderARM::VisitLongRotateRight(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  if (invoke->InputAt(1)->IsConstant()) {
    locations->SetInAt(1, Location::ConstantLocation(invoke->InputAt(1)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongRotateRight(HInvoke* invoke) {
  GenLongRotate(invoke->GetLocations(), GetAssembler(), false /* is_left */);
}

// Rotate-left needs kOutputOverlap: the register-amount path of
// GenIntegerRotate writes the output before reading the input again.
void IntrinsicLocationsBuilderARM::VisitIntegerRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerRotateLeft(HInvoke* invoke) {
  GenIntegerRotate(invoke->GetLocations(), GetAssembler(), true /* is_left */);
}

// Same constraints as VisitLongRotateRight above.
void IntrinsicLocationsBuilderARM::VisitLongRotateLeft(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  if (invoke->InputAt(1)->IsConstant()) {
    locations->SetInAt(1, Location::ConstantLocation(invoke->InputAt(1)->AsConstant()));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongRotateLeft(HInvoke* invoke) {
  GenLongRotate(invoke->GetLocations(), GetAssembler(), true /* is_left */);
}
489
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800490static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
491 Location in = locations->InAt(0);
492 Location out = locations->Out();
493
494 if (is64bit) {
495 __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
496 FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
497 } else {
498 __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
499 }
500}
501
void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());  // 64-bit.
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());  // 32-bit.
}
517
518static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
519 LocationSummary* locations = new (arena) LocationSummary(invoke,
520 LocationSummary::kNoCall,
521 kIntrinsified);
522 locations->SetInAt(0, Location::RequiresRegister());
523 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
524
525 locations->AddTemp(Location::RequiresRegister());
526}
527
// Emits branch-free code for Math.abs(int/long):
//   mask = x >> 31 (arithmetic, i.e. all sign bits), result = (x + mask) ^ mask.
// For negative x this is ~(x - 1) == -x; for non-negative x it is the identity.
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    // in_reg_hi is read after out_reg_lo is written (adc below), so this
    // particular aliasing must not occur.
    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    // Sign mask from the high word.
    __ Asr(mask, in_reg_hi, 31);
    // 64-bit add of the mask, carrying from the low to the high word.
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}
558
void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), false, GetAssembler());  // 32-bit.
}


void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), true, GetAssembler());  // 64-bit.
}
575
// Emits branch-free code for Math.min/max(int, int): compare, then a two-slot
// IT block that selects op1 on LT/GT and op2 on the inverse condition.
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  // kItElse: the second mov executes under the inverted condition.
  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}
589
590static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
591 LocationSummary* locations = new (arena) LocationSummary(invoke,
592 LocationSummary::kNoCall,
593 kIntrinsified);
594 locations->SetInAt(0, Location::RequiresRegister());
595 locations->SetInAt(1, Location::RequiresRegister());
596 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
597}
598
void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, GetAssembler());  // is_min.
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, GetAssembler());  // !is_min.
}
614
void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

// Math.sqrt maps directly onto the VFP vsqrt.f64 instruction.
void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}
625
void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

// Memory.peekByte: sign-extending byte load from the raw native address.
void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    // The address register would be clobbered by the low-word load, so load
    // the high word first.
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

// Memory.peekShort: sign-extending halfword load from the raw native address.
void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
679
680static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
681 LocationSummary* locations = new (arena) LocationSummary(invoke,
682 LocationSummary::kNoCall,
683 kIntrinsified);
684 locations->SetInAt(0, Location::RequiresRegister());
685 locations->SetInAt(1, Location::RequiresRegister());
686}
687
void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

// Memory.pokeByte: byte store to the raw native address (low word only).
void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

// Memory.pokeShort: halfword store to the raw native address.
void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}
731
void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

// Thread.currentThread(): load the managed Thread peer object out of the
// runtime Thread* held in the dedicated thread register (TR).
void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}
746
// Emits code for the Unsafe.get{Int,Long,Object}[Volatile] family: load of
// |type| from object |base| + long |offset| (low word only used).
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK((type == Primitive::kPrimInt) ||
         (type == Primitive::kPrimLong) ||
         (type == Primitive::kPrimNot));
  ArmAssembler* assembler = codegen->GetAssembler();
  Register base = locations->InAt(1).AsRegister<Register>();  // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.

  if (type == Primitive::kPrimLong) {
    Register trg_lo = locations->Out().AsRegisterPairLow<Register>();
    __ add(IP, base, ShifterOperand(offset));
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      // No single-copy-atomic ldrd on this CPU: use ldrexd for an atomic
      // 64-bit load.
      Register trg_hi = locations->Out().AsRegisterPairHigh<Register>();
      __ ldrexd(trg_lo, trg_hi, IP);
    } else {
      __ ldrd(trg_lo, Address(IP));
    }
  } else {
    Register trg = locations->Out().AsRegister<Register>();
    __ ldr(trg, Address(base, offset));
  }

  if (is_volatile) {
    // Barrier after the load for volatile (acquire) semantics.
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    // Heap references may be poisoned; decode before handing the value out.
    Register trg = locations->Out().AsRegister<Register>();
    __ MaybeUnpoisonHeapReference(trg);
  }
}
782
783static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
784 LocationSummary* locations = new (arena) LocationSummary(invoke,
785 LocationSummary::kNoCall,
786 kIntrinsified);
787 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
788 locations->SetInAt(1, Location::RequiresRegister());
789 locations->SetInAt(2, Location::RequiresRegister());
790 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
791}
792
// Location builders for the Unsafe.get* intrinsics: every variant shares the
// same (receiver, object, offset) -> register layout.
void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}

// Code generators: dispatch to GenUnsafeGet with the right type/volatility.
void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
830
831static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
832 const ArmInstructionSetFeatures& features,
833 Primitive::Type type,
834 bool is_volatile,
835 HInvoke* invoke) {
836 LocationSummary* locations = new (arena) LocationSummary(invoke,
837 LocationSummary::kNoCall,
838 kIntrinsified);
839 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
840 locations->SetInAt(1, Location::RequiresRegister());
841 locations->SetInAt(2, Location::RequiresRegister());
842 locations->SetInAt(3, Location::RequiresRegister());
843
844 if (type == Primitive::kPrimLong) {
845 // Potentially need temps for ldrexd-strexd loop.
846 if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
847 locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
848 locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
849 }
850 } else if (type == Primitive::kPrimNot) {
851 // Temps for card-marking.
852 locations->AddTemp(Location::RequiresRegister()); // Temp.
853 locations->AddTemp(Location::RequiresRegister()); // Card.
854 }
855}
856
// Location builders for the Unsafe.put* intrinsics. Ordered ("lazySet")
// variants share locations with the plain ones; only volatility affects the
// temps reserved (see CreateIntIntIntIntToVoid).
void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}
884
// Emits code for the sun.misc.Unsafe.put* family: stores the value at
// base + offset. A dmb is emitted before the store for both volatile and
// ordered puts (release ordering), and after the store for volatile puts
// only. Volatile 64-bit stores on cores without single-copy-atomic strd use
// an ldrexd/strexd retry loop. Reference stores also mark the GC card.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    // Release barrier: order prior accesses before the store.
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      // strexd requires a preceding ldrexd to own the exclusive monitor;
      // loop until the store-exclusive succeeds (strexd writes 0 to temp_lo).
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      // Poison a copy so `value` stays intact for the card mark below.
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    // Trailing barrier for volatile store (sequential consistency).
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}
942
// Code generators for the Unsafe.put* intrinsics: dispatch to GenUnsafePut
// with the right type / volatility / ordering flags.
void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, /* is_ordered */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, /* is_ordered */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, /* is_ordered */ false, codegen_);
}
970
971static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
972 HInvoke* invoke) {
973 LocationSummary* locations = new (arena) LocationSummary(invoke,
974 LocationSummary::kNoCall,
975 kIntrinsified);
976 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
977 locations->SetInAt(1, Location::RequiresRegister());
978 locations->SetInAt(2, Location::RequiresRegister());
979 locations->SetInAt(3, Location::RequiresRegister());
980 locations->SetInAt(4, Location::RequiresRegister());
981
982 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
983
984 locations->AddTemp(Location::RequiresRegister()); // Pointer.
985 locations->AddTemp(Location::RequiresRegister()); // Temp 1.
986 locations->AddTemp(Location::RequiresRegister()); // Temp 2.
987}
988
// Emits a compare-and-swap for Unsafe.compareAndSwap{Int,Object}: atomically
// stores the new value at base + offset iff the current value equals the
// expected one. The boolean result is 1 on success, 0 on failure. 64-bit CAS
// is not handled here (see DCHECK).
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    // The in-memory form of references is poisoned; poison the inputs so the
    // comparison and the stored value match that form. Undone at the end.
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);

  // tmp_lo == 0 (flags EQ) iff the loaded value equals the expected one.
  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  // IT block: only when equal, attempt the store-exclusive. strex writes 0 on
  // success / 1 on failure into tmp_lo; comparing against 1 makes EQ mean
  // "store failed, retry".
  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

  // out = (tmp_lo == 0): compute 1 - tmp_lo, then clamp to 0 when the
  // reverse-subtract borrowed (i.e. tmp_lo > 1, carry clear).
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}
1050
Andreas Gampeca714582015-04-03 19:41:34 -07001051void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001052 CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
1053}
Andreas Gampeca714582015-04-03 19:41:34 -07001054void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001055 CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
1056}
1057void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
1058 GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
1059}
1060void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
1061 GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
1062}
1063
1064void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
1065 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1066 LocationSummary::kCallOnSlowPath,
1067 kIntrinsified);
1068 locations->SetInAt(0, Location::RequiresRegister());
1069 locations->SetInAt(1, Location::RequiresRegister());
1070 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1071
1072 locations->AddTemp(Location::RequiresRegister());
1073 locations->AddTemp(Location::RequiresRegister());
1074}
1075
// Generates inline code for String.charAt: bounds-checks the index against
// the string length (unsigned CS compare also catches negative indices) and
// loads the 16-bit character; out-of-range indices go to the slow path.
void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  // Unsigned comparison: idx >= length (CS) covers negative idx as well.
  __ cmp(idx, ShifterOperand(temp));
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}
1112
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001113void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
1114 // The inputs plus one temp.
1115 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1116 LocationSummary::kCall,
1117 kIntrinsified);
1118 InvokeRuntimeCallingConvention calling_convention;
1119 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1120 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1121 locations->SetOut(Location::RegisterLocation(R0));
1122}
1123
// Generates String.compareTo as a call to the pStringCompareTo entrypoint.
// A null argument is diverted to the slow path (which throws).
void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // The argument (the string compared against) may still be null.
  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
1142
Agi Csaki289cd552015-08-18 17:10:38 -07001143void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
1144 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1145 LocationSummary::kNoCall,
1146 kIntrinsified);
1147 InvokeRuntimeCallingConvention calling_convention;
1148 locations->SetInAt(0, Location::RequiresRegister());
1149 locations->SetInAt(1, Location::RequiresRegister());
1150 // Temporary registers to store lengths of strings and for calculations.
1151 // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
1152 locations->AddTemp(Location::RegisterLocation(R0));
1153 locations->AddTemp(Location::RequiresRegister());
1154 locations->AddTemp(Location::RequiresRegister());
1155
1156 locations->SetOut(Location::RequiresRegister());
1157}
1158
// Generates inline code for String.equals: null-checks the argument, compares
// class fields (String is final, so equal classes means the argument is a
// string), compares lengths, then compares the character data one 32-bit word
// (two chars) at a time.
void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ CompareAndBranchIfZero(arg, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ ldr(temp, Address(str, class_offset));
  __ ldr(temp1, Address(arg, class_offset));
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // temp1 holds the current byte offset into the value data of both strings.
  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  // temp counts remaining characters; each iteration consumes two of them.
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}
1237
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001238static void GenerateVisitStringIndexOf(HInvoke* invoke,
1239 ArmAssembler* assembler,
1240 CodeGeneratorARM* codegen,
1241 ArenaAllocator* allocator,
1242 bool start_at_zero) {
1243 LocationSummary* locations = invoke->GetLocations();
1244 Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
1245
1246 // Note that the null check must have been done earlier.
1247 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1248
1249 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
1250 // or directly dispatch if we have a constant.
1251 SlowPathCodeARM* slow_path = nullptr;
1252 if (invoke->InputAt(1)->IsIntConstant()) {
1253 if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
1254 std::numeric_limits<uint16_t>::max()) {
1255 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1256 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1257 slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1258 codegen->AddSlowPath(slow_path);
1259 __ b(slow_path->GetEntryLabel());
1260 __ Bind(slow_path->GetExitLabel());
1261 return;
1262 }
1263 } else {
1264 Register char_reg = locations->InAt(1).AsRegister<Register>();
1265 __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
1266 __ cmp(char_reg, ShifterOperand(tmp_reg));
1267 slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1268 codegen->AddSlowPath(slow_path);
1269 __ b(slow_path->GetEntryLabel(), HI);
1270 }
1271
1272 if (start_at_zero) {
1273 DCHECK_EQ(tmp_reg, R2);
1274 // Start-index = 0.
1275 __ LoadImmediate(tmp_reg, 0);
1276 }
1277
1278 __ LoadFromOffset(kLoadWord, LR, TR,
1279 QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
1280 __ blx(LR);
1281
1282 if (slow_path != nullptr) {
1283 __ Bind(slow_path->GetExitLabel());
1284 }
1285}
1286
// String.indexOf(ch): runtime call, start index implicitly 0.
void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}
1305
// String.indexOf(ch, fromIndex): runtime call with explicit start index.
void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}
1325
Jeff Hao848f70a2014-01-15 13:49:50 -08001326void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
1327 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1328 LocationSummary::kCall,
1329 kIntrinsified);
1330 InvokeRuntimeCallingConvention calling_convention;
1331 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1332 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1333 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1334 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1335 locations->SetOut(Location::RegisterLocation(R0));
1336}
1337
1338void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
1339 ArmAssembler* assembler = GetAssembler();
1340 LocationSummary* locations = invoke->GetLocations();
1341
1342 Register byte_array = locations->InAt(0).AsRegister<Register>();
1343 __ cmp(byte_array, ShifterOperand(0));
1344 SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
1345 codegen_->AddSlowPath(slow_path);
1346 __ b(slow_path->GetEntryLabel(), EQ);
1347
1348 __ LoadFromOffset(
1349 kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
1350 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1351 __ blx(LR);
1352 __ Bind(slow_path->GetExitLabel());
1353}
1354
// String.<init> from a char array: implemented as a runtime call. No null
// check is emitted here for the input array.
void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  // NOTE(review): PC recorded before the blx — see the matching note in
  // VisitStringNewStringFromBytes.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}
1374
// String.<init> from another String: implemented as a runtime call.
void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // A null source string is diverted to the slow path (which throws).
  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCodeARM* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  // NOTE(review): PC recorded before the blx — see the matching note in
  // VisitStringNewStringFromBytes.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
1400
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001401// Unimplemented intrinsics.
1402
1403#define UNIMPLEMENTED_INTRINSIC(Name) \
1404void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
1405} \
1406void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
1407}
1408
1409UNIMPLEMENTED_INTRINSIC(IntegerReverse)
1410UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
1411UNIMPLEMENTED_INTRINSIC(LongReverse)
1412UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
1413UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
1414UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
1415UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
1416UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
1417UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
1418UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
1419UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
1420UNIMPLEMENTED_INTRINSIC(MathCeil) // Could be done by changing rounding mode, maybe?
1421UNIMPLEMENTED_INTRINSIC(MathFloor) // Could be done by changing rounding mode, maybe?
1422UNIMPLEMENTED_INTRINSIC(MathRint)
1423UNIMPLEMENTED_INTRINSIC(MathRoundDouble) // Could be done by changing rounding mode, maybe?
1424UNIMPLEMENTED_INTRINSIC(MathRoundFloat) // Could be done by changing rounding mode, maybe?
1425UNIMPLEMENTED_INTRINSIC(UnsafeCASLong) // High register pressure.
1426UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001427UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
Jeff Hao848f70a2014-01-15 13:49:50 -08001428UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001429
Roland Levillain4d027112015-07-01 15:41:14 +01001430#undef UNIMPLEMENTED_INTRINSIC
1431
1432#undef __
1433
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001434} // namespace arm
1435} // namespace art