/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;
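
// Note (a rough description, see intrinsics_utils.h for the authoritative
// behavior): IntrinsicSlowPathARM provides the shared slow-path scaffolding
// that moves the arguments into the runtime calling convention and calls the
// original (non-intrinsified) method, so each intrinsic below only has to
// emit its fast path.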

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<ArmAssembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathARM : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathARM(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    uint32_t element_size_shift = Primitive::ComponentSizeShift(Primitive::kPrimNot);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Register src_curr_addr = locations->GetTemp(0).AsRegister<Register>();
    Register dst_curr_addr = locations->GetTemp(1).AsRegister<Register>();
    Register src_stop_addr = locations->GetTemp(2).AsRegister<Register>();
    Register tmp = locations->GetTemp(3).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // Compute the base destination address in `dst_curr_addr`.
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ AddConstant(dst_curr_addr, dest, element_size * constant + offset);
    } else {
      __ add(dst_curr_addr,
             dest,
             ShifterOperand(dest_pos.AsRegister<Register>(), LSL, element_size_shift));
      __ AddConstant(dst_curr_addr, offset);
    }

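    // Copy loop sketch: each iteration loads a reference with post-index
    // addressing (the address register advances by element_size after the
    // access), marks it through the read barrier entrypoint, stores it with
    // the matching post-index write, and stops once src_curr_addr catches up
    // with src_stop_addr.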
    Label loop;
    __ Bind(&loop);
    __ ldr(tmp, Address(src_curr_addr, element_size, Address::PostIndex));
    __ MaybeUnpoisonHeapReference(tmp);
    // TODO: Inline the mark bit check before calling the runtime?
    // tmp = ReadBarrier::Mark(tmp);
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathARM::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(tmp, SP);
    DCHECK_NE(tmp, LR);
    DCHECK_NE(tmp, PC);
    // IP is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary (and not preserved). It thus cannot be used by
    // any live register in this slow path.
    DCHECK_NE(src_curr_addr, IP);
    DCHECK_NE(dst_curr_addr, IP);
    DCHECK_NE(src_stop_addr, IP);
    DCHECK_NE(tmp, IP);
    DCHECK(0 <= tmp && tmp < kNumberOfCoreRegisters) << tmp;
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(tmp);
    // This runtime call does not require a stack map.
    arm_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(tmp);
    __ str(tmp, Address(dst_curr_addr, element_size, Address::PostIndex));
    __ cmp(src_curr_addr, ShifterOperand(src_stop_addr));
    __ b(&loop, NE);
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathARM);
};

#undef __

IntrinsicLocationsBuilderARM::IntrinsicLocationsBuilderARM(CodeGeneratorARM* codegen)
    : arena_(codegen->GetGraph()->GetArena()),
      assembler_(codegen->GetAssembler()),
      features_(codegen->GetInstructionSetFeatures()) {}

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}
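
// Sketch of the 64-bit case above: clz of the high word is taken first, and
// only when the high word is all zeros does the low word contribute, offset
// by 32. E.g. for the long value 1 (hi = 0, lo = 1), the result is
// clz(lo) + 32 = 31 + 32 = 63.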

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}
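
// The rbit/clz pair above is the standard ARM trailing-zero count: reversing
// the bits turns trailing zeros into leading zeros, so clz(rbit(x)) == ctz(x).
// E.g. x = 0x8 reverses to 0x10000000, whose clz is 3, the trailing-zero count.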

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}
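
// Branchless abs sketch: with mask = in >> 31 (arithmetic shift, so 0 for
// non-negative and -1 for negative inputs), abs(in) == (in + mask) ^ mask.
// For in = -5: mask = -1, in + mask = -6, and -6 ^ -1 == 5. The 64-bit
// variant threads the add through adds/adc so the carry reaches the high word.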

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}
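
// The Thumb-2 IT (if-then-else) block above acts as a branchless select:
// after the cmp, exactly one of the two predicated movs executes, leaving
// either op1 or op2 in `out` depending on is_min and the comparison result.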

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: the address is unaligned. With control register bit SCTLR.A = 0
  // plain loads support unaligned access, but ldrd still throws an alignment
  // fault, so we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
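  // If the low output register aliases the address register, load the high
  // word first so the second load still sees the original address.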
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: the address is unaligned. With control register bit SCTLR.A = 0
  // plain stores support unaligned access, but strd still throws an alignment
  // fault, so we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}
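
// Thread.currentThread() reduces to a single load: TR holds the current
// art::Thread*, and PeerOffset is the offset of its java.lang.Thread peer
// object within that structure.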

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      __ ldr(trg, Address(base, offset));
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(
              invoke, trg_loc, base, 0U, offset_loc, TIMES_1, temp, /* needs_null_check */ false);
          if (is_volatile) {
            __ dmb(ISH);
          }
        } else {
          __ ldr(trg, Address(base, offset));
          if (is_volatile) {
            __ dmb(ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ ldr(trg, Address(base, offset));
        if (is_volatile) {
          __ dmb(ISH);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      __ add(IP, base, ShifterOperand(offset));
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
        __ ldrexd(trg_lo, trg_hi, IP);
      } else {
        __ ldrd(trg_lo, Address(IP));
      }
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}
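
// Acquire sketch: a volatile get above is a plain load followed by `dmb ish`,
// the usual ARMv7 load-acquire mapping. For volatile longs on cores without
// atomic ldrd, ldrexd supplies the required 64-bit single-copy atomicity.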

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}
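
// Barrier placement sketch: a volatile store above is bracketed by `dmb ish`
// on both sides (release ordering before the store, store-load ordering
// after), while an ordered (lazySet-style) store only needs the leading
// barrier, which is why the trailing dmb is guarded by is_volatile alone.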

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke,
                                                Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STXR
  // instruction below.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not poison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value_lo);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  Label loop_head;
  __ Bind(&loop_head);

  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not unpoison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    }
  }
}
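
// Result sketch: on loop exit tmp_lo is 0 iff the CAS succeeded (a failed
// compare leaves the non-zero difference in tmp_lo; a lost strex reservation
// loops back). `rsbs out, tmp_lo, 1` computes 1 - tmp_lo, giving 1 on
// success; any other difference either yields 0 directly or borrows (carry
// clear), in which case the predicated mov clamps out to 0.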

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS).
  //
  // TODO(rpl): Implement read barrier support in GenCAS and re-enable
  // this intrinsic.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS).
  //
  // TODO(rpl): Implement read barrier support in GenCAS and re-enable
  // this intrinsic.
  DCHECK(!kEmitCompilerReadBarrier);

  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus three temps.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp0 = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label find_char_diff;
  Label end;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take slow path and throw if input can be and is null.
  SlowPathCode* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
    codegen_->AddSlowPath(slow_path);
    __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ subs(out, str, ShifterOperand(arg));
  __ b(&end, EQ);
  // Load lengths of this and argument strings.
  __ ldr(temp2, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // out = length diff.
  __ subs(out, temp2, ShifterOperand(temp1));
  // temp0 = min(len(str), len(arg)).
  __ it(Condition::LT, kItElse);
  __ mov(temp0, ShifterOperand(temp2), Condition::LT);
  __ mov(temp0, ShifterOperand(temp1), Condition::GE);
  // Shorter string is empty?
  __ CompareAndBranchIfZero(temp0, &end);

  // Store offset of string value in preparation for comparison loop.
  __ mov(temp1, ShifterOperand(value_offset));

  // Assertions that must hold in order to compare multiple characters at a time.
  CHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment),
                "String data must be 8-byte aligned for unrolled CompareTo loop.");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Unrolled loop comparing 4x16-bit chars per iteration (ok because of string data alignment).
  __ Bind(&loop);
  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ sub(temp0, temp0, ShifterOperand(2));

  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ subs(temp0, temp0, ShifterOperand(2));

  __ b(&loop, GT);
  __ b(&end);

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ eor(temp1, temp2, ShifterOperand(IP));
  __ rbit(temp1, temp1);
  __ clz(temp1, temp1);
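  // temp1 now holds the bit index of the lowest differing bit between the
  // last two words compared; e.g. a difference in the high half-word gives
  // temp1 >= 16 and hence (temp1 >> 4) == 1.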
1125
1126 // temp0 = number of 16-bit characters remaining to compare.
1127 // (it could be < 1 if a difference is found after the first SUB in the comparison loop, and
1128 // after the end of the shorter string data).
1129
1130 // (temp1 >> 4) = character where difference occurs between the last two words compared, on the
1131 // interval [0,1] (0 for low half-word different, 1 for high half-word different).
1132
1133 // If temp0 <= (temp1 >> 4), the difference occurs outside the remaining string data, so just
1134 // return length diff (out).
1135 __ cmp(temp0, ShifterOperand(temp1, LSR, 4));
1136 __ b(&end, LE);
1137 // Extract the characters and calculate the difference.
1138 __ bic(temp1, temp1, ShifterOperand(0xf));
1139 __ Lsr(temp2, temp2, temp1);
1140 __ Lsr(IP, IP, temp1);
1141 __ movt(temp2, 0);
1142 __ movt(IP, 0);
  __ sub(out, IP, ShifterOperand(temp2));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // Using the cbz instruction requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if the input is null, return false if it is.
    __ CompareAndBranchIfZero(arg, &return_false);
  }

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to the receiver's class
    // field.
    __ ldr(temp, Address(str, class_offset));
    __ ldr(temp1, Address(arg, class_offset));
    __ cmp(temp, ShifterOperand(temp1));
    __ b(&return_false, NE);
  }

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String data must be aligned for fast compare.");

  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If the loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}
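
// As a rough, comment-only sketch (not compiled), the word-compare loop above
// implements the equivalent of:
//
//   int words = (count + 1) / 2;  // two chars per 32-bit word, odd tail zero-padded
//   for (int i = 0; i < words; ++i) {
//     if (WordAt(str, i) != WordAt(arg, i)) return false;
//   }
//   return true;
//
// where `WordAt` is a hypothetical helper naming the 32-bit load at
// value_offset + 4 * i; it is not a runtime API.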

static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. We emit a slow-path check when we don't know statically,
  // dispatch directly for a large constant, or omit the check for a small constant or a char.
  SlowPathCode* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    // 0xffff is not a modified immediate but 0x10000 is, so use `>= 0x10000` instead of
    // `> 0xffff`.
    __ cmp(char_reg,
           ShifterOperand(static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HS);
  }

  if (start_at_zero) {
    Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pIndexOf).Int32Value());
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
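
// Comment-only sketch of the dispatch above, for a code-point argument `ch`
// (a hedged summary, not an exact transcription):
//
//   if (ch is an int constant)  -> constant > 0xffff ? always take the slow path
//                                                    : plain entrypoint call, no check
//   else if (type(ch) != char)  -> emit `cmp ch, #0x10000; bhs slow_path`,
//                                  then call the entrypoint
//   else (ch is a char)         -> no check needed, a char cannot exceed 0xffff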

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
                    LR,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ LoadFromOffset(kLoadWord,
                    LR,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainAndSlowPath,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
                    LR,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}
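
// The three NewStringFrom* intrinsics above share one runtime-call shape,
// sketched here in comment form only:
//
//   cmp   arg, #0          ; only when a null argument must raise NPE
//   beq   slow_path
//   ldr   LR, [TR, #off]   ; fetch the quick entrypoint off the Thread register
//   blx   LR               ; call it; the new String comes back in R0
//   (RecordPcInfo)         ; map the return PC for stack walks and GC
//
// FromChars alone can skip the null check, since its callers are known to have
// checked `data` already (see the comment in its generator).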

void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style one.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  LocationSummary* locations = invoke->GetLocations();
  if (locations == nullptr) {
    return;
  }

  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
    locations->SetInAt(3, Location::RequiresRegister());
  }
  if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
    locations->SetInAt(4, Location::RequiresRegister());
  }
  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // Temporary register IP cannot be used in
    // ReadBarrierSystemArrayCopySlowPathARM (because that register
    // is clobbered by ReadBarrierMarkRegX entry points). Get an extra
    // temporary register from the register allocator.
    locations->AddTemp(Location::RequiresRegister());
  }
}

static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, temp, input, length_offset);
      __ subs(temp, temp, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}
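
// In the fully general (register) case, CheckPosition above falls through only
// when all of the following hold, mirroring the System.arraycopy contract for
// one array (a comment-only restatement):
//
//   pos >= 0
//   pos <= input.length             // via subs + branch on LT
//   input.length - pos >= length    // the copied range fits inside `input`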

void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style one.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Location temp1_loc = locations->GetTemp(0);
  Register temp1 = temp1_loc.AsRegister<Register>();
  Location temp2_loc = locations->GetTemp(1);
  Register temp2 = temp2_loc.AsRegister<Register>();
  Location temp3_loc = locations->GetTemp(2);
  Register temp3 = temp3_loc.AsRegister<Register>();

  SlowPathCode* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(intrinsic_slow_path);

  Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same, we go to the slow path if we need to do
  // forward copying.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ cmp(src, ShifterOperand(dest));
        __ b(intrinsic_slow_path->GetEntryLabel(), EQ);
      }

      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ cmp(src, ShifterOperand(dest));
        __ b(&conditions_on_positions_validated, NE);
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(intrinsic_slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ cmp(src, ShifterOperand(dest));
      __ b(&conditions_on_positions_validated, NE);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(intrinsic_slow_path->GetEntryLabel(), LT);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, intrinsic_slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ CompareAndBranchIfZero(dest, intrinsic_slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(intrinsic_slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                intrinsic_slow_path,
                temp1,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                intrinsic_slow_path,
                temp1,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If none of these checks succeed, we go to the
    // slow path.

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      if (!optimizations.GetSourceIsNonPrimitiveArray()) {
        // /* HeapReference<Class> */ temp1 = src->klass_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, src, class_offset, temp2_loc, /* needs_null_check */ false);
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
        __ CompareAndBranchIfZero(temp1, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp1` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
        // /* uint16_t */ temp1 = static_cast<uint16>(temp1->primitive_type_);
        __ LoadFromOffset(kLoadUnsignedHalfword, temp1, temp1, primitive_offset);
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
      }

      // /* HeapReference<Class> */ temp1 = dest->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, dest, class_offset, temp2_loc, /* needs_null_check */ false);

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        //
        // Register `temp1` is not trashed by the read barrier emitted
        // by GenerateFieldLoadWithBakerReadBarrier below, as that
        // method produces a call to a ReadBarrierMarkRegX entry point,
        // which saves all potentially live registers, including
        // temporaries such as `temp1`.
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp2_loc, temp1, component_offset, temp3_loc, /* needs_null_check */ false);
        __ CompareAndBranchIfZero(temp2, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp2` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
        // /* uint16_t */ temp2 = static_cast<uint16>(temp2->primitive_type_);
        __ LoadFromOffset(kLoadUnsignedHalfword, temp2, temp2, primitive_offset);
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ CompareAndBranchIfNonZero(temp2, intrinsic_slow_path->GetEntryLabel());
      }

      // For the same reason given earlier, `temp1` is not trashed by the
      // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
      // /* HeapReference<Class> */ temp2 = src->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp2_loc, src, class_offset, temp3_loc, /* needs_null_check */ false);
      // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
      __ cmp(temp1, ShifterOperand(temp2));

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        Label do_copy;
        __ b(&do_copy, EQ);
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
        // /* HeapReference<Class> */ temp1 = temp1->super_class_
        // We do not need to emit a read barrier for the following
        // heap reference load, as `temp1` is only used in a
        // comparison with null below, and this reference is not
        // kept afterwards.
        __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
        __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ b(intrinsic_slow_path->GetEntryLabel(), NE);
      }
    } else {
      // Non read barrier code.

      // /* HeapReference<Class> */ temp1 = dest->klass_
      __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
      // /* HeapReference<Class> */ temp2 = src->klass_
      __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
      bool did_unpoison = false;
      if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
          !optimizations.GetSourceIsNonPrimitiveArray()) {
        // One or two of the references need to be unpoisoned. Unpoison them
        // both to make the identity check valid.
        __ MaybeUnpoisonHeapReference(temp1);
        __ MaybeUnpoisonHeapReference(temp2);
        did_unpoison = true;
      }

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
        __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
        __ MaybeUnpoisonHeapReference(temp3);
        // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
        __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
      }

      if (!optimizations.GetSourceIsNonPrimitiveArray()) {
        // Bail out if the source is not a non-primitive array.
        // /* HeapReference<Class> */ temp3 = temp2->component_type_
        __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
        __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
        __ MaybeUnpoisonHeapReference(temp3);
        // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
        __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
        static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
        __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
      }

      __ cmp(temp1, ShifterOperand(temp2));

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        Label do_copy;
        __ b(&do_copy, EQ);
        if (!did_unpoison) {
          __ MaybeUnpoisonHeapReference(temp1);
        }
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
        __ MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp1 = temp1->super_class_
        __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
        // No need to unpoison the result, we're comparing against null.
        __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ b(intrinsic_slow_path->GetEntryLabel(), NE);
      }
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // /* HeapReference<Class> */ temp1 = src->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, src, class_offset, temp2_loc, /* needs_null_check */ false);
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp3_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
      __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
      // If heap poisoning is enabled, `temp3` has been unpoisoned
      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
    } else {
      // /* HeapReference<Class> */ temp1 = src->klass_
      __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
    }
    // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
  }

  int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
  uint32_t element_size_shift = Primitive::ComponentSizeShift(Primitive::kPrimNot);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

  // Compute the base source address in `temp1`.
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, element_size_shift));
    __ AddConstant(temp1, offset);
  }

  // Compute the end source address in `temp3`.
  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, element_size_shift));
  }

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // The base destination address is computed later, as `temp2` is
    // used for intermediate computations.

    // SystemArrayCopy implementation for Baker read barriers (see
    // also CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier):
    //
    //   if (src_ptr != end_ptr) {
    //     uint32_t rb_state = LockWord(src->monitor_).ReadBarrierState();
    //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //     bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
    //     if (is_gray) {
    //       // Slow-path copy.
    //       do {
    //         *dest_ptr++ = MaybePoison(ReadBarrier::Mark(MaybeUnpoison(*src_ptr++)));
    //       } while (src_ptr != end_ptr)
    //     } else {
    //       // Fast-path copy.
    //       do {
    //         *dest_ptr++ = *src_ptr++;
    //       } while (src_ptr != end_ptr)
    //     }
    //   }

    Label loop, done;

    // Don't enter the copy loop if `length == 0`.
    __ cmp(temp1, ShifterOperand(temp3));
    __ b(&done, EQ);

    // /* int32_t */ monitor = src->monitor_
    __ LoadFromOffset(kLoadWord, temp2, src, monitor_offset);
    // /* LockWord */ lock_word = LockWord(monitor)
    static_assert(sizeof(LockWord) == sizeof(int32_t),
                  "art::LockWord and int32_t have different sizes.");

    // Introduce a dependency on the lock_word including the rb_state,
    // which shall prevent load-load reordering without using
    // a memory barrier (which would be more expensive).
    // `src` is unchanged by this operation, but its value now depends
    // on `temp2`.
    __ add(src, src, ShifterOperand(temp2, LSR, 32));

    // Slow path used to copy the array when `src` is gray.
    SlowPathCode* read_barrier_slow_path =
        new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathARM(invoke);
    codegen_->AddSlowPath(read_barrier_slow_path);

    // Given the numeric representation, it's enough to check the low bit of the
    // rb_state. We do that by shifting the bit out of the lock word with LSRS,
    // which can be a 16-bit instruction unlike the TST immediate.
    static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
    static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
    __ Lsrs(temp2, temp2, LockWord::kReadBarrierStateShift + 1);
    // Carry flag is the last bit shifted out by LSRS.
    __ b(read_barrier_slow_path->GetEntryLabel(), CS);

    // Fast-path copy.

    // Compute the base destination address in `temp2`.
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ AddConstant(temp2, dest, element_size * constant + offset);
    } else {
      __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, element_size_shift));
      __ AddConstant(temp2, offset);
    }

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
    __ Bind(&loop);
    __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
    __ str(IP, Address(temp2, element_size, Address::PostIndex));
    __ cmp(temp1, ShifterOperand(temp3));
    __ b(&loop, NE);

    __ Bind(read_barrier_slow_path->GetExitLabel());
    __ Bind(&done);
  } else {
    // Non read barrier code.

    // Compute the base destination address in `temp2`.
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ AddConstant(temp2, dest, element_size * constant + offset);
    } else {
      __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, element_size_shift));
      __ AddConstant(temp2, offset);
    }

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
    Label loop, done;
    __ cmp(temp1, ShifterOperand(temp3));
    __ b(&done, EQ);
    __ Bind(&loop);
    __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
    __ str(IP, Address(temp2, element_size, Address::PostIndex));
    __ cmp(temp1, ShifterOperand(temp3));
    __ b(&loop, NE);
    __ Bind(&done);
  }

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* value_can_be_null */ false);

  __ Bind(intrinsic_slow_path->GetExitLabel());
}
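
// With 4-byte heap references, the address arithmetic above amounts to the
// following (comment-only sketch):
//
//   src_ptr = src  + data_offset + (src_pos  << 2);  // first element read
//   end_ptr = src_ptr + (length << 2);               // one past the last element
//   dst_ptr = dest + data_offset + (dest_pos << 2);  // first element written
//
// so both copy loops terminate exactly when src_ptr == end_ptr.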

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCallOnMainOnly,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->InputAt(1)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCallOnMainOnly,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
}

static void GenFPToFPCall(HInvoke* invoke,
                          ArmAssembler* assembler,
                          CodeGeneratorARM* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmPointerSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmAssembler* assembler,
                            CodeGeneratorARM* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(2)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(3)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmPointerSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ vmovrrd(calling_convention.GetRegisterAt(2),
             calling_convention.GetRegisterAt(3),
             FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}
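
// The two helpers above marshal VFP values into the soft-float (core register)
// convention used by these entrypoints; roughly (comment-only sketch):
//
//   vmov r0, r1, d_in0   ; first double argument -> r0:r1
//   vmov r2, r3, d_in1   ; second double argument -> r2:r3 (two-argument case)
//   blx  LR              ; entrypoint address already loaded off the Thread register
//   vmov d_out, r0, r1   ; the double result comes back in r0:r1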

void IntrinsicLocationsBuilderARM::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rbit(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rbit(out_reg_lo, in_reg_hi);
  __ rbit(out_reg_hi, in_reg_lo);
}
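
// Comment-only note: reversing 64 bits decomposes as "reverse each 32-bit half,
// then swap the halves", which is why the low output register takes the high
// input and vice versa:
//
//   reverse64(hi:lo) == reverse32(lo) : reverse32(hi)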

void IntrinsicLocationsBuilderARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rev(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rev(out_reg_lo, in_reg_hi);
  __ rev(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitShortReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ revsh(out, in);
}

static void GenBitCount(HInvoke* instr, Primitive::Type type, ArmAssembler* assembler) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  bool is_long = type == Primitive::kPrimLong;
  LocationSummary* locations = instr->GetLocations();
  Location in = locations->InAt(0);
  Register src_0 = is_long ? in.AsRegisterPairLow<Register>() : in.AsRegister<Register>();
  Register src_1 = is_long ? in.AsRegisterPairHigh<Register>() : src_0;
  SRegister tmp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
  DRegister tmp_d = FromLowSToD(tmp_s);
  Register out_r = locations->Out().AsRegister<Register>();

  // Move data from core register(s) to the temp D-reg for the bit count calculation, then
  // move back. According to the Cortex-A57 and Cortex-A72 optimization guides, compared to
  // transferring to a full D-reg, transferring data from a core reg to the upper or lower
  // half of a VFP D-reg requires extra latency. That's why, for the integer bit count, we
  // use 'vmov d0, r0, r0' instead of 'vmov d0[0], r0'.
  __ vmovdrr(tmp_d, src_1, src_0);                         // Temp DReg |--src_1|--src_0|
  __ vcntd(tmp_d, tmp_d);                                  // Temp DReg |c|c|c|c|c|c|c|c|
  __ vpaddld(tmp_d, tmp_d, 8, /* is_unsigned */ true);     // Temp DReg |--c|--c|--c|--c|
  __ vpaddld(tmp_d, tmp_d, 16, /* is_unsigned */ true);    // Temp DReg |------c|------c|
  if (is_long) {
    __ vpaddld(tmp_d, tmp_d, 32, /* is_unsigned */ true);  // Temp DReg |--------------c|
  }
  __ vmovrs(out_r, tmp_s);
}
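
// Worked example of the reduction above (comment only): for the long input
// 0x00000003'00000007,
//
//   vcnt.8     d0, d0   ; per-byte counts: |0|0|0|2|0|0|0|3|
//   vpaddl.u8  d0, d0   ; u16 lanes:       |--0|--2|--0|--3|
//   vpaddl.u16 d0, d0   ; u32 lanes:       |------2|------3|
//   vpaddl.u32 d0, d0   ; u64 lane:        |--------------5|
//
// so Long.bitCount(0x300000007L) == 5 ends up in the low S-register.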

void IntrinsicLocationsBuilderARM::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
  invoke->GetLocations()->AddTemp(Location::RequiresFpuRegister());
}

void IntrinsicCodeGeneratorARM::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongBitCount(HInvoke* invoke) {
  VisitIntegerBitCount(invoke);
}

void IntrinsicCodeGeneratorARM::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetAssembler());
}
2307
Tim Zhang25abd6c2016-01-19 23:39:24 +08002308void IntrinsicLocationsBuilderARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
2309 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2310 LocationSummary::kNoCall,
2311 kIntrinsified);
2312 locations->SetInAt(0, Location::RequiresRegister());
2313 locations->SetInAt(1, Location::RequiresRegister());
2314 locations->SetInAt(2, Location::RequiresRegister());
2315 locations->SetInAt(3, Location::RequiresRegister());
2316 locations->SetInAt(4, Location::RequiresRegister());
2317
Scott Wakeling3fdab772016-04-25 11:32:37 +01002318 // Temporary registers to store lengths of strings and for calculations.
Tim Zhang25abd6c2016-01-19 23:39:24 +08002319 locations->AddTemp(Location::RequiresRegister());
2320 locations->AddTemp(Location::RequiresRegister());
2321 locations->AddTemp(Location::RequiresRegister());
2322}
2323
void IntrinsicCodeGeneratorARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register num_chr = locations->GetTemp(0).AsRegister<Register>();
  Register src_ptr = locations->GetTemp(1).AsRegister<Register>();
  Register dst_ptr = locations->GetTemp(2).AsRegister<Register>();

  // src_ptr = srcObj + value_offset + srcBegin * 2: address of the first
  // source character to copy.
  __ add(src_ptr, srcObj, ShifterOperand(value_offset));
  __ add(src_ptr, src_ptr, ShifterOperand(srcBegin, LSL, 1));

  // dst_ptr = dstObj + data_offset + dstBegin * 2: address of the first
  // destination character.
  __ add(dst_ptr, dstObj, ShifterOperand(data_offset));
  __ add(dst_ptr, dst_ptr, ShifterOperand(dstBegin, LSL, 1));

  // Compute the number of characters to copy; subs also sets the condition
  // flags for the zero-length early-out below.
  __ subs(num_chr, srcEnd, ShifterOperand(srcBegin));

  // Do the copy.
  Label loop, remainder, done;

  // Early out for valid zero-length retrievals.
  __ b(&done, EQ);

  // Subtract into IP first, so that num_chr does not need repairing on the
  // fewer-than-four-characters path.
  __ subs(IP, num_chr, ShifterOperand(4));
  __ b(&remainder, LT);

  // Keep the result of the earlier subs; we are going to fetch at least four characters.
  __ mov(num_chr, ShifterOperand(IP));

  // Main loop: fetches and stores four 16-bit characters at a time, using two
  // LDR/STR pairs. (LDRD/STRD fault on unaligned addresses, and it is not
  // worth inlining extra code to rectify this everywhere this intrinsic
  // applies.)
  __ Bind(&loop);
  __ ldr(IP, Address(src_ptr, char_size * 2));
  __ subs(num_chr, num_chr, ShifterOperand(4));
  __ str(IP, Address(dst_ptr, char_size * 2));
  __ ldr(IP, Address(src_ptr, char_size * 4, Address::PostIndex));
  __ str(IP, Address(dst_ptr, char_size * 4, Address::PostIndex));
  __ b(&loop, GE);

  // The loop above exits with num_chr in [-4, -1]; add 4 back to get the
  // number of remaining characters.
  __ adds(num_chr, num_chr, ShifterOperand(4));
  __ b(&done, EQ);

  // Main loop for the < 4 character case and remainder handling. Loads and
  // stores one 16-bit Java character at a time.
  __ Bind(&remainder);
  __ ldrh(IP, Address(src_ptr, char_size, Address::PostIndex));
  __ subs(num_chr, num_chr, ShifterOperand(1));
  __ strh(IP, Address(dst_ptr, char_size, Address::PostIndex));
  __ b(&remainder, GT);

  __ Bind(&done);
}
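
// In effect (a sketch that ignores the four-characters-at-a-time unrolling
// above), the emitted code performs:
//
//   for (int i = 0; i < srcEnd - srcBegin; ++i) {
//     dst[dstBegin + i] = src[srcBegin + i];
//   }
//
// where src is the String's backing char array and dst is the destination
// char[], with both base-plus-scaled-index expressions folded into src_ptr
// and dst_ptr up front.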

void IntrinsicLocationsBuilderARM::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // Shifting left by 1 bit makes the value encodable as an immediate operand;
  // we don't care about the sign bit anyway.
  constexpr uint32_t infinity = kPositiveInfinityFloat << 1U;

  __ vmovrs(out, locations->InAt(0).AsFpuRegister<SRegister>());
  // We don't care about the sign bit, so shift left.
  __ Lsl(out, out, 1);
  __ eor(out, out, ShifterOperand(infinity));
  // If the result is 0, then it has 32 leading zeros; otherwise it has fewer.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}
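
// Worked example: +1.0f is 0x3f800000; shifted left it becomes 0x7f000000;
// XOR with 0xff000000 (kPositiveInfinityFloat << 1) gives 0x80000000, whose
// clz is 0, so the final shift yields 0. For +/-infinity the XOR result is
// exactly 0, clz returns 32, and 32 >> 5 == 1.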

void IntrinsicLocationsBuilderARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // The highest 32 bits of double precision positive infinity separated into
  // two constants encodable as immediate operands.
  constexpr uint32_t infinity_high = 0x7f000000U;
  constexpr uint32_t infinity_high2 = 0x00f00000U;

  static_assert((infinity_high | infinity_high2) ==
                    static_cast<uint32_t>(kPositiveInfinityDouble >> 32U),
                "The constants do not add up to the high 32 bits of double "
                "precision positive infinity.");
  __ vmovrrd(IP, out, FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ eor(out, out, ShifterOperand(infinity_high));
  __ eor(out, out, ShifterOperand(infinity_high2));
  // We don't care about the sign bit, so shift left.
  __ orr(out, IP, ShifterOperand(out, LSL, 1));
  // If the result is 0, then it has 32 leading zeros; otherwise it has fewer.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}
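
// Worked example: +infinity has high word 0x7ff00000 and low word 0. The two
// XORs clear the high word to 0, the left shift discards the (cleared) sign
// bit, the OR with the zero low word stays 0, clz returns 32, and
// 32 >> 5 == 1. For any other value (including NaNs) some bit survives, clz
// returns less than 32, and the result is 0.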

UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRint)
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARM, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit)

// Intrinsics introduced in Java 1.8.
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM)

#undef __

}  // namespace arm
}  // namespace art