/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<ArmAssembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathARM : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathARM(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    uint32_t element_size_shift = Primitive::ComponentSizeShift(Primitive::kPrimNot);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Register src_curr_addr = locations->GetTemp(0).AsRegister<Register>();
    Register dst_curr_addr = locations->GetTemp(1).AsRegister<Register>();
    Register src_stop_addr = locations->GetTemp(2).AsRegister<Register>();
    Register tmp = locations->GetTemp(3).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // Compute the base destination address in `dst_curr_addr`.
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ AddConstant(dst_curr_addr, dest, element_size * constant + offset);
    } else {
      __ add(dst_curr_addr,
             dest,
             ShifterOperand(dest_pos.AsRegister<Register>(), LSL, element_size_shift));
      __ AddConstant(dst_curr_addr, offset);
    }

    Label loop;
    __ Bind(&loop);
    __ ldr(tmp, Address(src_curr_addr, element_size, Address::PostIndex));
    __ MaybeUnpoisonHeapReference(tmp);
    // TODO: Inline the mark bit check before calling the runtime?
    // tmp = ReadBarrier::Mark(tmp);
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathARM::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(tmp, SP);
    DCHECK_NE(tmp, LR);
    DCHECK_NE(tmp, PC);
    // IP is used internally by the ReadBarrierMarkRegX entry point
    // as a temporary (and not preserved). It thus cannot be used by
    // any live register in this slow path.
    DCHECK_NE(src_curr_addr, IP);
    DCHECK_NE(dst_curr_addr, IP);
    DCHECK_NE(src_stop_addr, IP);
    DCHECK_NE(tmp, IP);
    DCHECK(0 <= tmp && tmp < kNumberOfCoreRegisters) << tmp;
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kArmPointerSize>(tmp);
    // This runtime call does not require a stack map.
    arm_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(tmp);
    __ str(tmp, Address(dst_curr_addr, element_size, Address::PostIndex));
    __ cmp(src_curr_addr, ShifterOperand(src_stop_addr));
    __ b(&loop, NE);
    __ b(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathARM"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathARM);
};

#undef __

IntrinsicLocationsBuilderARM::IntrinsicLocationsBuilderARM(CodeGeneratorARM* codegen)
    : arena_(codegen->GetGraph()->GetArena()),
      codegen_(codegen),
      assembler_(codegen->GetAssembler()),
      features_(codegen->GetInstructionSetFeatures()) {}

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
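    // A 64-bit CLZ is composed from two 32-bit clz instructions: the result
    // is clz(hi) when the high word is non-zero, and 32 + clz(lo) otherwise.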
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

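// ARM has no count-trailing-zeros instruction, so the sequence below uses
// rbit (bit reversal) followed by clz: ctz(x) == clz(reverse_bits(x)).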
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

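// Branchless absolute value: `mask` = x >> 31 (arithmetic shift), i.e. 0 for
// non-negative x and all ones for negative x, so (x + mask) ^ mask yields
// abs(x). For example, x = -5: mask = -1, x + mask = -6, and -6 ^ -1 = 5.
// The 64-bit variant uses adds/adc so the carry propagates across the pair.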
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

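// Branchless min/max: after the compare, an IT (if-then-else) block predicates
// the two moves so exactly one of them executes, e.g. for min:
//   cmp   op1, op2
//   ite   lt
//   movlt out, op1
//   movge out, op2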
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: control register bit SCTLR.A = 0, i.e. unaligned ldr/str are
  // legal, but ldrd still faults on unaligned addresses. So we can't use ldrd,
  // as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
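  // If `addr` aliases `lo`, load the high word first so the base address is
  // not clobbered before the second load.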
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: control register bit SCTLR.A = 0, i.e. unaligned ldr/str are
  // legal, but strd still faults on unaligned addresses. So we can't use strd,
  // as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      __ ldr(trg, Address(base, offset));
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(
              invoke, trg_loc, base, 0U, offset_loc, TIMES_1, temp, /* needs_null_check */ false);
          if (is_volatile) {
            __ dmb(ISH);
          }
        } else {
          __ ldr(trg, Address(base, offset));
          if (is_volatile) {
            __ dmb(ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ ldr(trg, Address(base, offset));
        if (is_volatile) {
          __ dmb(ISH);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      __ add(IP, base, ShifterOperand(offset));
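      // A volatile 64-bit load must be single-copy atomic. When the core does
      // not guarantee that for ldrd, use ldrexd, which is single-copy atomic
      // on ARMv7.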
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
        __ ldrexd(trg_lo, trg_hi, IP);
      } else {
        __ ldrd(trg_lo, Address(IP));
      }
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  if (can_call && kUseBakerReadBarrier) {
    locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty());  // No caller-save registers.
  }
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    (can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in InstructionCodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
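    // A volatile 64-bit store must be single-copy atomic. Without atomic
    // ldrd/strd, emit an ldrexd/strexd loop: strexd only succeeds while the
    // exclusive monitor is held, which makes the doubleword store atomic.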
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke,
                                                Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      kUseBakerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeCASObject);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           (can_call
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall),
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output. Likewise when
  // emitting a (Baker) read barrier, which may call.
  Location::OutputOverlap overlaps =
      ((kPoisonHeapReferences && type == Primitive::kPrimNot) || can_call)
          ? Location::kOutputOverlap
          : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  // Temporary registers used in CAS. In the object case
  // (UnsafeCASObject intrinsic), these are also used for
  // card-marking, and possibly for (Baker) read barrier.
  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

static void GenCas(HInvoke* invoke, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Location out_loc = locations->Out();
  Register out = out_loc.AsRegister<Register>();                  // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();      // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();     // Offset (discard high 4B).
  Register expected = locations->InAt(3).AsRegister<Register>();  // Expected.
  Register value = locations->InAt(4).AsRegister<Register>();     // Value.

  Location tmp_ptr_loc = locations->GetTemp(0);
  Register tmp_ptr = tmp_ptr_loc.AsRegister<Register>();          // Pointer to actual memory.
  Register tmp = locations->GetTemp(1).AsRegister<Register>();    // Value in memory.

  if (type == Primitive::kPrimNot) {
    // The only read barrier implementation supporting the
    // UnsafeCASObject intrinsic is the Baker-style read barriers.
    DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp, base, value, value_can_be_null);

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // Need to make sure the reference stored in the field is a to-space
      // one before attempting the CAS or the CAS could fail incorrectly.
      codegen->GenerateReferenceLoadWithBakerReadBarrier(
          invoke,
          out_loc,  // Unused, used only as a "temporary" within the read barrier.
          base,
          /* offset */ 0u,
          /* index */ offset_loc,
          ScaleFactor::TIMES_1,
          tmp_ptr_loc,
          /* needs_null_check */ false,
          /* always_update_field */ true,
          &tmp);
    }
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STXR
  // instruction below.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    __ PoisonHeapReference(expected);
    if (value == expected) {
      // Do not poison `value`, as it is the same register as
      // `expected`, which has just been poisoned.
    } else {
      __ PoisonHeapReference(value);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = (tmp == 0);

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp, tmp_ptr);

  __ subs(tmp, tmp, ShifterOperand(expected));

  __ it(EQ, ItState::kItT);
  __ strex(tmp, value, tmp_ptr, EQ);
  __ cmp(tmp, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

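  // out = (tmp == 0): rsbs sets out = 1 - tmp and sets the flags; any tmp > 1
  // borrows (carry clear), so the predicated mov below zeroes out in that case.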
  __ rsbs(out, tmp, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    __ UnpoisonHeapReference(expected);
    if (value == expected) {
      // Do not unpoison `value`, as it is the same register as
      // `expected`, which has just been unpoisoned.
    } else {
      __ UnpoisonHeapReference(value);
    }
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke, Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // UnsafeCASObject intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  GenCas(invoke, Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  // Need an extra temporary register for the String compression feature.
  if (mirror::kUseStringCompression) {
    locations->AddTemp(Location::RequiresRegister());
  }
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp0 = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();
  Register temp3;
  if (mirror::kUseStringCompression) {
    temp3 = locations->GetTemp(3).AsRegister<Register>();
  }

  Label loop;
  Label find_char_diff;
  Label end;
  Label different_compression;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take slow path and throw if input can be and is null.
  SlowPathCode* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
    codegen_->AddSlowPath(slow_path);
    __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ subs(out, str, ShifterOperand(arg));
  __ b(&end, EQ);

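  // With string compression, the low bit of the `count` field is the
  // compression flag (in this code, set for uncompressed 16-bit strings and
  // clear for compressed 8-bit ones) and the character count is `count >> 1`.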
  if (mirror::kUseStringCompression) {
    // Load `count` fields of this and argument strings.
    __ ldr(temp3, Address(str, count_offset));
    __ ldr(temp2, Address(arg, count_offset));
    // Extract lengths from the `count` fields.
    __ Lsr(temp0, temp3, 1u);
    __ Lsr(temp1, temp2, 1u);
  } else {
    // Load lengths of this and argument strings.
    __ ldr(temp0, Address(str, count_offset));
    __ ldr(temp1, Address(arg, count_offset));
  }
  // out = length diff.
  __ subs(out, temp0, ShifterOperand(temp1));
  // temp0 = min(len(str), len(arg)).
  __ it(GT);
  __ mov(temp0, ShifterOperand(temp1), GT);
  // Shorter string is empty?
  __ CompareAndBranchIfZero(temp0, &end);

  if (mirror::kUseStringCompression) {
    // Check whether both strings use the same compression style; only then
    // can this comparison loop be used.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001129 __ eor(temp2, temp2, ShifterOperand(temp3));
1130 __ Lsrs(temp2, temp2, 1u);
1131 __ b(&different_compression, CS);
jessicahandojo05765752016-09-09 19:01:32 -07001132 // For string compression, calculate the number of bytes to compare (not chars).
1133 // This could in theory exceed INT32_MAX, so treat temp0 as unsigned.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001134 __ Lsls(temp3, temp3, 31u); // Extract purely the compression flag.
1135 __ it(NE);
1136 __ add(temp0, temp0, ShifterOperand(temp0), NE);
jessicahandojo05765752016-09-09 19:01:32 -07001137 }
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001138
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001139 // Store offset of string value in preparation for comparison loop.
1140 __ mov(temp1, ShifterOperand(value_offset));
1141
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001142 // Assertions that must hold in order to compare multiple characters at a time.
1143 CHECK_ALIGNED(value_offset, 8);
1144 static_assert(IsAligned<8>(kObjectAlignment),
1145 "String data must be 8-byte aligned for unrolled CompareTo loop.");
1146
1147 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1148 DCHECK_EQ(char_size, 2u);
1149
jessicahandojo05765752016-09-09 19:01:32 -07001150 Label find_char_diff_2nd_cmp;
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001151 // Unrolled loop comparing 4x16-bit chars per iteration (ok because of string data alignment).
1152 __ Bind(&loop);
1153 __ ldr(IP, Address(str, temp1));
1154 __ ldr(temp2, Address(arg, temp1));
1155 __ cmp(IP, ShifterOperand(temp2));
1156 __ b(&find_char_diff, NE);
1157 __ add(temp1, temp1, ShifterOperand(char_size * 2));
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001158
1159 __ ldr(IP, Address(str, temp1));
1160 __ ldr(temp2, Address(arg, temp1));
1161 __ cmp(IP, ShifterOperand(temp2));
jessicahandojo05765752016-09-09 19:01:32 -07001162 __ b(&find_char_diff_2nd_cmp, NE);
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001163 __ add(temp1, temp1, ShifterOperand(char_size * 2));
jessicahandojo05765752016-09-09 19:01:32 -07001164 // With string compression, we have compared 8 bytes, otherwise 4 chars.
1165 __ subs(temp0, temp0, ShifterOperand(mirror::kUseStringCompression ? 8 : 4));
1166 __ b(&loop, HI);
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001167 __ b(&end);
1168
jessicahandojo05765752016-09-09 19:01:32 -07001169 __ Bind(&find_char_diff_2nd_cmp);
1170 if (mirror::kUseStringCompression) {
1171 __ subs(temp0, temp0, ShifterOperand(4)); // 4 bytes previously compared.
1172 __ b(&end, LS); // Was the second comparison fully beyond the end?
1173 } else {
1174 // Without string compression, we can start treating temp0 as signed
1175 // and rely on the signed comparison below.
1176 __ sub(temp0, temp0, ShifterOperand(2));
1177 }
1178
1179 // Find the single character difference.
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001180 __ Bind(&find_char_diff);
 1181   // Get the bit position of the lowest bit that differs.
1182 __ eor(temp1, temp2, ShifterOperand(IP));
1183 __ rbit(temp1, temp1);
1184 __ clz(temp1, temp1);
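  // For example, if the two words differ only in bit 17, the EOR yields 1 << 17,
  // RBIT moves that bit to position 31 - 17 = 14 and CLZ then returns 17, so temp1
  // holds the position of the lowest differing bit.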
1185
jessicahandojo05765752016-09-09 19:01:32 -07001186 // temp0 = number of characters remaining to compare.
 1187   // (Without string compression, it can be less than 1 if the difference was found by the
 1188   // second CMP in the comparison loop, past the end of the shorter string's data.)
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001189
jessicahandojo05765752016-09-09 19:01:32 -07001190  // Without string compression, (temp1 >> 4) = character where the difference occurs between
 1191   // the last two words compared, in the interval [0,1]
 1192   // (0 for low half-word different, 1 for high half-word different).
 1193   // With string compression, (temp1 >> 3) = byte where the difference occurs,
 1194   // in the interval [0,3].
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001195
jessicahandojo05765752016-09-09 19:01:32 -07001196 // If temp0 <= (temp1 >> (kUseStringCompression ? 3 : 4)), the difference occurs outside
1197 // the remaining string data, so just return length diff (out).
1198 // The comparison is unsigned for string compression, otherwise signed.
1199 __ cmp(temp0, ShifterOperand(temp1, LSR, mirror::kUseStringCompression ? 3 : 4));
1200 __ b(&end, mirror::kUseStringCompression ? LS : LE);
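  // For example, without compression, if the lowest differing bit is bit 17, then
  // temp1 >> 4 = 1, i.e. the difference is in the high half-word of the last pair of
  // words compared; if only temp0 = 1 character remained, that difference lies past
  // the end of the shorter string, so the length diff in `out` is the answer.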
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001201
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001202 // Extract the characters and calculate the difference.
jessicahandojo05765752016-09-09 19:01:32 -07001203 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001204    // For compressed strings we need to clear 0x7 from temp1; for uncompressed ones, 0xf.
 1205   // We also need to prepare the character extraction mask `uncompressed ? 0xffffu : 0xffu`.
1206 // The compression flag is now in the highest bit of temp3, so let's play some tricks.
1207 __ orr(temp3, temp3, ShifterOperand(0xffu << 23)); // uncompressed ? 0xff800000u : 0x7ff80000u
1208 __ bic(temp1, temp1, ShifterOperand(temp3, LSR, 31 - 3)); // &= ~(uncompressed ? 0xfu : 0x7u)
1209 __ Asr(temp3, temp3, 7u); // uncompressed ? 0xffff0000u : 0xff0000u.
1210 __ Lsr(temp2, temp2, temp1); // Extract second character.
1211 __ Lsr(temp3, temp3, 16u); // uncompressed ? 0xffffu : 0xffu
1212 __ Lsr(out, IP, temp1); // Extract first character.
1213 __ and_(temp2, temp2, ShifterOperand(temp3));
1214 __ and_(out, out, ShifterOperand(temp3));
1215 } else {
1216 __ bic(temp1, temp1, ShifterOperand(0xf));
1217 __ Lsr(temp2, temp2, temp1);
1218 __ Lsr(out, IP, temp1);
1219 __ movt(temp2, 0);
1220 __ movt(out, 0);
jessicahandojo05765752016-09-09 19:01:32 -07001221 }
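  // A sketch of the values in the compressed-strings branch above, for the
  // uncompressed case: after the earlier LSLS, temp3 = 0x80000000; the ORR makes it
  // 0xff800000, so temp3 >> 28 = 0xf (the BIC mask), the ASR #7 gives 0xffff0000 and
  // the final LSR #16 gives the extraction mask 0xffff. For the compressed case,
  // temp3 = 0 leads to 0x7f800000, BIC mask 0x7 and extraction mask 0xff.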
jessicahandojo05765752016-09-09 19:01:32 -07001222
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001223 __ sub(out, out, ShifterOperand(temp2));
jessicahandojo05765752016-09-09 19:01:32 -07001224
1225 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001226 __ b(&end);
1227 __ Bind(&different_compression);
1228
1229 // Comparison for different compression style.
jessicahandojo05765752016-09-09 19:01:32 -07001230 const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
1231 DCHECK_EQ(c_char_size, 1u);
jessicahandojo05765752016-09-09 19:01:32 -07001232
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001233    // We want to free up temp3, currently holding `str.count`, for comparison.
 1234   // So, we move its compression flag to the bottom bit of the iteration count `temp0`,
 1235   // which we then need to treat as unsigned. Start by freeing the bit with an ADD and
 1236   // continue further down with a LSRS+SBC, which flips the meaning of the flag but allows
 1237   // `subs temp0, #2; bhi different_compression_loop` to serve as the loop condition.
1238 __ add(temp0, temp0, ShifterOperand(temp0)); // Unlike LSL, this ADD is always 16-bit.
1239 // `temp1` will hold the compressed data pointer, `temp2` the uncompressed data pointer.
1240 __ mov(temp1, ShifterOperand(str));
1241 __ mov(temp2, ShifterOperand(arg));
1242 __ Lsrs(temp3, temp3, 1u); // Continue the move of the compression flag.
1243 __ it(CS, kItThen); // Interleave with selection of temp1 and temp2.
1244 __ mov(temp1, ShifterOperand(arg), CS); // Preserves flags.
1245 __ mov(temp2, ShifterOperand(str), CS); // Preserves flags.
1246 __ sbc(temp0, temp0, ShifterOperand(0)); // Complete the move of the compression flag.
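  // For example, with a minimum length of 3 and a compressed `str`: the ADD makes
  // temp0 = 6, the LSRS clears the carry (flag 0) and the SBC turns temp0 into
  // 6 - 1 = 5. Bit 0 now holds the inverted `str` flag and
  // `subs temp0, #2; bhi` still iterates three times (temp0 = 5, 3, 1).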
jessicahandojo05765752016-09-09 19:01:32 -07001247
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001248 // Adjust temp1 and temp2 from string pointers to data pointers.
1249 __ add(temp1, temp1, ShifterOperand(value_offset));
1250 __ add(temp2, temp2, ShifterOperand(value_offset));
1251
1252 Label different_compression_loop;
1253 Label different_compression_diff;
1254
1255 // Main loop for different compression.
1256 __ Bind(&different_compression_loop);
1257 __ ldrb(IP, Address(temp1, c_char_size, Address::PostIndex));
1258 __ ldrh(temp3, Address(temp2, char_size, Address::PostIndex));
1259 __ cmp(IP, ShifterOperand(temp3));
1260 __ b(&different_compression_diff, NE);
1261 __ subs(temp0, temp0, ShifterOperand(2));
1262 __ b(&different_compression_loop, HI);
jessicahandojo05765752016-09-09 19:01:32 -07001263 __ b(&end);
1264
1265 // Calculate the difference.
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001266 __ Bind(&different_compression_diff);
1267 __ sub(out, IP, ShifterOperand(temp3));
 1268   // Flip the difference if `arg` is compressed.
 1269   // `temp0` contains the inverted `str` compression flag, i.e. the same as the `arg` flag.
1270 __ Lsrs(temp0, temp0, 1u);
1271 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1272 "Expecting 0=compressed, 1=uncompressed");
1273 __ it(CC);
1274 __ rsb(out, out, ShifterOperand(0), CC);
jessicahandojo05765752016-09-09 19:01:32 -07001275 }
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001276
1277 __ Bind(&end);
1278
1279 if (can_slow_path) {
1280 __ Bind(slow_path->GetExitLabel());
1281 }
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001282}
1283
Agi Csaki289cd552015-08-18 17:10:38 -07001284void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
1285 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1286 LocationSummary::kNoCall,
1287 kIntrinsified);
1288 InvokeRuntimeCallingConvention calling_convention;
1289 locations->SetInAt(0, Location::RequiresRegister());
1290 locations->SetInAt(1, Location::RequiresRegister());
1291 // Temporary registers to store lengths of strings and for calculations.
 1292   // Using the cbz instruction requires a low register, so explicitly set a temp to be R0.
1293 locations->AddTemp(Location::RegisterLocation(R0));
1294 locations->AddTemp(Location::RequiresRegister());
1295 locations->AddTemp(Location::RequiresRegister());
1296
1297 locations->SetOut(Location::RequiresRegister());
1298}
1299
1300void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
1301 ArmAssembler* assembler = GetAssembler();
1302 LocationSummary* locations = invoke->GetLocations();
1303
1304 Register str = locations->InAt(0).AsRegister<Register>();
1305 Register arg = locations->InAt(1).AsRegister<Register>();
1306 Register out = locations->Out().AsRegister<Register>();
1307
1308 Register temp = locations->GetTemp(0).AsRegister<Register>();
1309 Register temp1 = locations->GetTemp(1).AsRegister<Register>();
1310 Register temp2 = locations->GetTemp(2).AsRegister<Register>();
1311
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001312 Label loop;
Agi Csaki289cd552015-08-18 17:10:38 -07001313 Label end;
1314 Label return_true;
1315 Label return_false;
1316
1317 // Get offsets of count, value, and class fields within a string object.
1318 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1319 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1320 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1321
1322 // Note that the null check must have been done earlier.
1323 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1324
Vladimir Marko53b52002016-05-24 19:30:45 +01001325 StringEqualsOptimizations optimizations(invoke);
1326 if (!optimizations.GetArgumentNotNull()) {
1327 // Check if input is null, return false if it is.
1328 __ CompareAndBranchIfZero(arg, &return_false);
1329 }
Agi Csaki289cd552015-08-18 17:10:38 -07001330
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001331 // Reference equality check, return true if same reference.
1332 __ cmp(str, ShifterOperand(arg));
1333 __ b(&return_true, EQ);
1334
Vladimir Marko53b52002016-05-24 19:30:45 +01001335 if (!optimizations.GetArgumentIsString()) {
1336 // Instanceof check for the argument by comparing class fields.
1337 // All string objects must have the same type since String cannot be subclassed.
1338 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1339 // If the argument is a string object, its class field must be equal to receiver's class field.
1340 __ ldr(temp, Address(str, class_offset));
1341 __ ldr(temp1, Address(arg, class_offset));
1342 __ cmp(temp, ShifterOperand(temp1));
1343 __ b(&return_false, NE);
1344 }
Agi Csaki289cd552015-08-18 17:10:38 -07001345
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001346 // Load `count` fields of this and argument strings.
Agi Csaki289cd552015-08-18 17:10:38 -07001347 __ ldr(temp, Address(str, count_offset));
1348 __ ldr(temp1, Address(arg, count_offset));
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001349  // Check if `count` fields are equal; return false if they're not.
jessicahandojo05765752016-09-09 19:01:32 -07001350  // This also compares the compression styles; if they differ, return false.
Agi Csaki289cd552015-08-18 17:10:38 -07001351 __ cmp(temp, ShifterOperand(temp1));
1352 __ b(&return_false, NE);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001353 // Return true if both strings are empty. Even with string compression `count == 0` means empty.
1354 static_assert(static_cast<uint32_t>(mirror::StringCompressionFlag::kCompressed) == 0u,
1355 "Expecting 0=compressed, 1=uncompressed");
Agi Csaki289cd552015-08-18 17:10:38 -07001356 __ cbz(temp, &return_true);
Agi Csaki289cd552015-08-18 17:10:38 -07001357
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001358 // Assertions that must hold in order to compare strings 4 bytes at a time.
Agi Csaki289cd552015-08-18 17:10:38 -07001359 DCHECK_ALIGNED(value_offset, 4);
Scott Wakelingc25cbf12016-04-18 09:00:11 +01001360 static_assert(IsAligned<4>(kObjectAlignment), "String data must be aligned for fast compare.");
Agi Csaki289cd552015-08-18 17:10:38 -07001361
jessicahandojo05765752016-09-09 19:01:32 -07001362 if (mirror::kUseStringCompression) {
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001363 // For string compression, calculate the number of bytes to compare (not chars).
1364 // This could in theory exceed INT32_MAX, so treat temp as unsigned.
1365 __ Lsrs(temp, temp, 1u); // Extract length and check compression flag.
1366 __ it(CS); // If uncompressed,
1367 __ add(temp, temp, ShifterOperand(temp), CS); // double the byte count.
jessicahandojo05765752016-09-09 19:01:32 -07001368 }
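  // For example, an uncompressed string of length 2 has count = (2 << 1) | 1 = 5;
  // the LSRS yields temp = 2 with the carry set and the conditional ADD doubles it
  // to 4 bytes to compare.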
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001369
1370 // Store offset of string value in preparation for comparison loop.
jessicahandojo05765752016-09-09 19:01:32 -07001371 __ LoadImmediate(temp1, value_offset);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001372
1373 // Loop to compare strings 4 bytes at a time starting at the front of the string.
1374 // Ok to do this because strings are zero-padded to kObjectAlignment.
Agi Csaki289cd552015-08-18 17:10:38 -07001375 __ Bind(&loop);
1376 __ ldr(out, Address(str, temp1));
1377 __ ldr(temp2, Address(arg, temp1));
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001378 __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
Agi Csaki289cd552015-08-18 17:10:38 -07001379 __ cmp(out, ShifterOperand(temp2));
1380 __ b(&return_false, NE);
Vladimir Markofdaf0f42016-10-13 19:29:53 +01001381 // With string compression, we have compared 4 bytes, otherwise 2 chars.
1382 __ subs(temp, temp, ShifterOperand(mirror::kUseStringCompression ? 4 : 2));
1383 __ b(&loop, HI);
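  // For example, two equal strings of length 3 (uncompressed) are compared in two
  // iterations; the second iteration reads one real character plus one character of
  // the zero padding, which compares equal in both strings as noted above.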
Agi Csaki289cd552015-08-18 17:10:38 -07001384
1385 // Return true and exit the function.
1386 // If loop does not result in returning false, we return true.
1387 __ Bind(&return_true);
1388 __ LoadImmediate(out, 1);
1389 __ b(&end);
1390
1391 // Return false and exit the function.
1392 __ Bind(&return_false);
1393 __ LoadImmediate(out, 0);
1394 __ Bind(&end);
1395}
1396
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001397static void GenerateVisitStringIndexOf(HInvoke* invoke,
1398 ArmAssembler* assembler,
1399 CodeGeneratorARM* codegen,
1400 ArenaAllocator* allocator,
1401 bool start_at_zero) {
1402 LocationSummary* locations = invoke->GetLocations();
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001403
1404 // Note that the null check must have been done earlier.
1405 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1406
1407 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001408 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Andreas Gampe85b62f22015-09-09 13:15:38 -07001409 SlowPathCode* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001410 HInstruction* code_point = invoke->InputAt(1);
1411 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001412 if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001413 std::numeric_limits<uint16_t>::max()) {
1414 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1415 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1416 slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1417 codegen->AddSlowPath(slow_path);
1418 __ b(slow_path->GetEntryLabel());
1419 __ Bind(slow_path->GetExitLabel());
1420 return;
1421 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001422 } else if (code_point->GetType() != Primitive::kPrimChar) {
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001423 Register char_reg = locations->InAt(1).AsRegister<Register>();
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001424    // 0xffff is not a modified immediate but 0x10000 is, so use `>= 0x10000` instead of `> 0xffff`.
1425 __ cmp(char_reg,
1426 ShifterOperand(static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1));
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001427 slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1428 codegen->AddSlowPath(slow_path);
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001429 __ b(slow_path->GetEntryLabel(), HS);
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001430 }
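  // Code points above 0xffff are stored as surrogate pairs in the UTF-16 string
  // data, so they cannot be matched by a single-char comparison in the fast stub;
  // the branches above defer them to the runtime through the slow path instead.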
1431
1432 if (start_at_zero) {
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001433 Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001434 DCHECK_EQ(tmp_reg, R2);
1435 // Start-index = 0.
1436 __ LoadImmediate(tmp_reg, 0);
1437 }
1438
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001439 codegen->InvokeRuntime(kQuickIndexOf, invoke, invoke->GetDexPc(), slow_path);
Roland Levillain42ad2882016-02-29 18:26:54 +00001440 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001441
1442 if (slow_path != nullptr) {
1443 __ Bind(slow_path->GetExitLabel());
1444 }
1445}
1446
1447void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
1448 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001449 LocationSummary::kCallOnMainAndSlowPath,
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001450 kIntrinsified);
1451 // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1452 // best to align the inputs accordingly.
1453 InvokeRuntimeCallingConvention calling_convention;
1454 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1455 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1456 locations->SetOut(Location::RegisterLocation(R0));
1457
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001458 // Need to send start-index=0.
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001459 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1460}
1461
1462void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001463 GenerateVisitStringIndexOf(
1464 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001465}
1466
1467void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
1468 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001469 LocationSummary::kCallOnMainAndSlowPath,
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001470 kIntrinsified);
1471 // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1472 // best to align the inputs accordingly.
1473 InvokeRuntimeCallingConvention calling_convention;
1474 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1475 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1476 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1477 locations->SetOut(Location::RegisterLocation(R0));
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001478}
1479
1480void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001481 GenerateVisitStringIndexOf(
1482 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001483}
1484
Jeff Hao848f70a2014-01-15 13:49:50 -08001485void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
1486 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001487 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001488 kIntrinsified);
1489 InvokeRuntimeCallingConvention calling_convention;
1490 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1491 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1492 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1493 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1494 locations->SetOut(Location::RegisterLocation(R0));
1495}
1496
1497void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
1498 ArmAssembler* assembler = GetAssembler();
1499 LocationSummary* locations = invoke->GetLocations();
1500
1501 Register byte_array = locations->InAt(0).AsRegister<Register>();
1502 __ cmp(byte_array, ShifterOperand(0));
Andreas Gampe85b62f22015-09-09 13:15:38 -07001503 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001504 codegen_->AddSlowPath(slow_path);
1505 __ b(slow_path->GetEntryLabel(), EQ);
1506
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001507 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc(), slow_path);
Roland Levillainf969a202016-03-09 16:14:00 +00001508 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001509 __ Bind(slow_path->GetExitLabel());
1510}
1511
1512void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
1513 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001514 LocationSummary::kCallOnMainOnly,
Jeff Hao848f70a2014-01-15 13:49:50 -08001515 kIntrinsified);
1516 InvokeRuntimeCallingConvention calling_convention;
1517 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1518 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1519 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1520 locations->SetOut(Location::RegisterLocation(R0));
1521}
1522
1523void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
Roland Levillaincc3839c2016-02-29 16:23:48 +00001524 // No need to emit code checking whether `locations->InAt(2)` is a null
1525 // pointer, as callers of the native method
1526 //
1527 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1528 //
1529 // all include a null check on `data` before calling that method.
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001530 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001531 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001532}
1533
1534void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
1535 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001536 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001537 kIntrinsified);
1538 InvokeRuntimeCallingConvention calling_convention;
1539 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1540 locations->SetOut(Location::RegisterLocation(R0));
1541}
1542
1543void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
1544 ArmAssembler* assembler = GetAssembler();
1545 LocationSummary* locations = invoke->GetLocations();
1546
1547 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1548 __ cmp(string_to_copy, ShifterOperand(0));
Andreas Gampe85b62f22015-09-09 13:15:38 -07001549 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001550 codegen_->AddSlowPath(slow_path);
1551 __ b(slow_path->GetEntryLabel(), EQ);
1552
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001553 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc(), slow_path);
Roland Levillainf969a202016-03-09 16:14:00 +00001554 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01001555
Jeff Hao848f70a2014-01-15 13:49:50 -08001556 __ Bind(slow_path->GetExitLabel());
1557}
1558
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001559void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
Roland Levillain0b671c02016-08-19 12:02:34 +01001560 // The only read barrier implementation supporting the
1561 // SystemArrayCopy intrinsic is the Baker-style read barriers.
1562 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
Roland Levillain3d312422016-06-23 13:53:42 +01001563 return;
1564 }
1565
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001566 CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
1567 LocationSummary* locations = invoke->GetLocations();
1568 if (locations == nullptr) {
1569 return;
1570 }
1571
1572 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1573 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1574 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1575
1576 if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
1577 locations->SetInAt(1, Location::RequiresRegister());
1578 }
1579 if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
1580 locations->SetInAt(3, Location::RequiresRegister());
1581 }
1582 if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
1583 locations->SetInAt(4, Location::RequiresRegister());
1584 }
Roland Levillain0b671c02016-08-19 12:02:34 +01001585 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1586 // Temporary register IP cannot be used in
Roland Levillain16d9f942016-08-25 17:27:56 +01001587 // ReadBarrierSystemArrayCopySlowPathARM (because that register
Roland Levillain0b671c02016-08-19 12:02:34 +01001588 // is clobbered by ReadBarrierMarkRegX entry points). Get an extra
1589 // temporary register from the register allocator.
1590 locations->AddTemp(Location::RequiresRegister());
1591 }
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001592}
1593
1594static void CheckPosition(ArmAssembler* assembler,
1595 Location pos,
1596 Register input,
1597 Location length,
1598 SlowPathCode* slow_path,
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001599 Register temp,
1600 bool length_is_input_length = false) {
1601 // Where is the length in the Array?
1602 const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();
1603
1604 if (pos.IsConstant()) {
1605 int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
1606 if (pos_const == 0) {
1607 if (!length_is_input_length) {
1608 // Check that length(input) >= length.
1609 __ LoadFromOffset(kLoadWord, temp, input, length_offset);
1610 if (length.IsConstant()) {
1611 __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
1612 } else {
1613 __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
1614 }
1615 __ b(slow_path->GetEntryLabel(), LT);
1616 }
1617 } else {
1618 // Check that length(input) >= pos.
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01001619 __ LoadFromOffset(kLoadWord, temp, input, length_offset);
1620 __ subs(temp, temp, ShifterOperand(pos_const));
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001621 __ b(slow_path->GetEntryLabel(), LT);
1622
1623 // Check that (length(input) - pos) >= length.
1624 if (length.IsConstant()) {
1625 __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
1626 } else {
1627 __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
1628 }
1629 __ b(slow_path->GetEntryLabel(), LT);
1630 }
1631 } else if (length_is_input_length) {
1632 // The only way the copy can succeed is if pos is zero.
1633 Register pos_reg = pos.AsRegister<Register>();
1634 __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
1635 } else {
1636 // Check that pos >= 0.
1637 Register pos_reg = pos.AsRegister<Register>();
1638 __ cmp(pos_reg, ShifterOperand(0));
1639 __ b(slow_path->GetEntryLabel(), LT);
1640
1641 // Check that pos <= length(input).
1642 __ LoadFromOffset(kLoadWord, temp, input, length_offset);
1643 __ subs(temp, temp, ShifterOperand(pos_reg));
1644 __ b(slow_path->GetEntryLabel(), LT);
1645
1646 // Check that (length(input) - pos) >= length.
1647 if (length.IsConstant()) {
1648 __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
1649 } else {
1650 __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
1651 }
1652 __ b(slow_path->GetEntryLabel(), LT);
1653 }
1654}
1655
1656void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
Roland Levillain0b671c02016-08-19 12:02:34 +01001657 // The only read barrier implementation supporting the
1658 // SystemArrayCopy intrinsic is the Baker-style read barriers.
1659 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
Roland Levillain3d312422016-06-23 13:53:42 +01001660
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001661 ArmAssembler* assembler = GetAssembler();
1662 LocationSummary* locations = invoke->GetLocations();
1663
1664 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1665 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
1666 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
1667 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Roland Levillain0b671c02016-08-19 12:02:34 +01001668 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001669
1670 Register src = locations->InAt(0).AsRegister<Register>();
1671 Location src_pos = locations->InAt(1);
1672 Register dest = locations->InAt(2).AsRegister<Register>();
1673 Location dest_pos = locations->InAt(3);
1674 Location length = locations->InAt(4);
Roland Levillain0b671c02016-08-19 12:02:34 +01001675 Location temp1_loc = locations->GetTemp(0);
1676 Register temp1 = temp1_loc.AsRegister<Register>();
1677 Location temp2_loc = locations->GetTemp(1);
1678 Register temp2 = temp2_loc.AsRegister<Register>();
1679 Location temp3_loc = locations->GetTemp(2);
1680 Register temp3 = temp3_loc.AsRegister<Register>();
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001681
Roland Levillain0b671c02016-08-19 12:02:34 +01001682 SlowPathCode* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
1683 codegen_->AddSlowPath(intrinsic_slow_path);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001684
Roland Levillainebea3d22016-04-12 15:42:57 +01001685 Label conditions_on_positions_validated;
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001686 SystemArrayCopyOptimizations optimizations(invoke);
1687
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001688 // If source and destination are the same, we go to slow path if we need to do
1689 // forward copying.
1690 if (src_pos.IsConstant()) {
1691 int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
1692 if (dest_pos.IsConstant()) {
Nicolas Geoffray9f65db82016-07-07 12:07:42 +01001693 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
1694 if (optimizations.GetDestinationIsSource()) {
1695 // Checked when building locations.
1696 DCHECK_GE(src_pos_constant, dest_pos_constant);
1697 } else if (src_pos_constant < dest_pos_constant) {
1698 __ cmp(src, ShifterOperand(dest));
Roland Levillain0b671c02016-08-19 12:02:34 +01001699 __ b(intrinsic_slow_path->GetEntryLabel(), EQ);
Nicolas Geoffray9f65db82016-07-07 12:07:42 +01001700 }
1701
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001702 // Checked when building locations.
1703 DCHECK(!optimizations.GetDestinationIsSource()
1704 || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
1705 } else {
1706 if (!optimizations.GetDestinationIsSource()) {
Nicolas Geoffray9f65db82016-07-07 12:07:42 +01001707 __ cmp(src, ShifterOperand(dest));
Roland Levillainebea3d22016-04-12 15:42:57 +01001708 __ b(&conditions_on_positions_validated, NE);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001709 }
1710 __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01001711 __ b(intrinsic_slow_path->GetEntryLabel(), GT);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001712 }
1713 } else {
1714 if (!optimizations.GetDestinationIsSource()) {
Nicolas Geoffray9f65db82016-07-07 12:07:42 +01001715 __ cmp(src, ShifterOperand(dest));
Roland Levillainebea3d22016-04-12 15:42:57 +01001716 __ b(&conditions_on_positions_validated, NE);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001717 }
1718 if (dest_pos.IsConstant()) {
1719 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
1720 __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
1721 } else {
1722 __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
1723 }
Roland Levillain0b671c02016-08-19 12:02:34 +01001724 __ b(intrinsic_slow_path->GetEntryLabel(), LT);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001725 }
1726
Roland Levillainebea3d22016-04-12 15:42:57 +01001727 __ Bind(&conditions_on_positions_validated);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001728
1729 if (!optimizations.GetSourceIsNotNull()) {
1730 // Bail out if the source is null.
Roland Levillain0b671c02016-08-19 12:02:34 +01001731 __ CompareAndBranchIfZero(src, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001732 }
1733
1734 if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
1735 // Bail out if the destination is null.
Roland Levillain0b671c02016-08-19 12:02:34 +01001736 __ CompareAndBranchIfZero(dest, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001737 }
1738
1739 // If the length is negative, bail out.
1740 // We have already checked in the LocationsBuilder for the constant case.
1741 if (!length.IsConstant() &&
1742 !optimizations.GetCountIsSourceLength() &&
1743 !optimizations.GetCountIsDestinationLength()) {
1744 __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
Roland Levillain0b671c02016-08-19 12:02:34 +01001745 __ b(intrinsic_slow_path->GetEntryLabel(), LT);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001746 }
1747
1748 // Validity checks: source.
1749 CheckPosition(assembler,
1750 src_pos,
1751 src,
1752 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01001753 intrinsic_slow_path,
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001754 temp1,
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001755 optimizations.GetCountIsSourceLength());
1756
1757 // Validity checks: dest.
1758 CheckPosition(assembler,
1759 dest_pos,
1760 dest,
1761 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01001762 intrinsic_slow_path,
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001763 temp1,
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001764 optimizations.GetCountIsDestinationLength());
1765
1766 if (!optimizations.GetDoesNotNeedTypeCheck()) {
1767 // Check whether all elements of the source array are assignable to the component
1768 // type of the destination array. We do two checks: the classes are the same,
1769 // or the destination is Object[]. If none of these checks succeed, we go to the
1770 // slow path.
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001771
Roland Levillain0b671c02016-08-19 12:02:34 +01001772 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1773 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
1774 // /* HeapReference<Class> */ temp1 = src->klass_
1775 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1776 invoke, temp1_loc, src, class_offset, temp2_loc, /* needs_null_check */ false);
 1777       // Bail out if the source is not a non-primitive array.
1778 // /* HeapReference<Class> */ temp1 = temp1->component_type_
1779 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1780 invoke, temp1_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
1781 __ CompareAndBranchIfZero(temp1, intrinsic_slow_path->GetEntryLabel());
1782 // If heap poisoning is enabled, `temp1` has been unpoisoned
 1783       // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
1784 // /* uint16_t */ temp1 = static_cast<uint16>(temp1->primitive_type_);
1785 __ LoadFromOffset(kLoadUnsignedHalfword, temp1, temp1, primitive_offset);
1786 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
1787 __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001788 }
Roland Levillain0b671c02016-08-19 12:02:34 +01001789
1790 // /* HeapReference<Class> */ temp1 = dest->klass_
1791 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1792 invoke, temp1_loc, dest, class_offset, temp2_loc, /* needs_null_check */ false);
1793
1794 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
 1795       // Bail out if the destination is not a non-primitive array.
1796 //
1797 // Register `temp1` is not trashed by the read barrier emitted
1798 // by GenerateFieldLoadWithBakerReadBarrier below, as that
1799 // method produces a call to a ReadBarrierMarkRegX entry point,
1800 // which saves all potentially live registers, including
 1801       // temporaries such as `temp1`.
1802 // /* HeapReference<Class> */ temp2 = temp1->component_type_
1803 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1804 invoke, temp2_loc, temp1, component_offset, temp3_loc, /* needs_null_check */ false);
1805 __ CompareAndBranchIfZero(temp2, intrinsic_slow_path->GetEntryLabel());
1806 // If heap poisoning is enabled, `temp2` has been unpoisoned
 1807       // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
1808 // /* uint16_t */ temp2 = static_cast<uint16>(temp2->primitive_type_);
1809 __ LoadFromOffset(kLoadUnsignedHalfword, temp2, temp2, primitive_offset);
1810 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
1811 __ CompareAndBranchIfNonZero(temp2, intrinsic_slow_path->GetEntryLabel());
1812 }
1813
1814 // For the same reason given earlier, `temp1` is not trashed by the
1815 // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
1816 // /* HeapReference<Class> */ temp2 = src->klass_
1817 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1818 invoke, temp2_loc, src, class_offset, temp3_loc, /* needs_null_check */ false);
1819 // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
1820 __ cmp(temp1, ShifterOperand(temp2));
1821
1822 if (optimizations.GetDestinationIsTypedObjectArray()) {
1823 Label do_copy;
1824 __ b(&do_copy, EQ);
1825 // /* HeapReference<Class> */ temp1 = temp1->component_type_
1826 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1827 invoke, temp1_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
1828 // /* HeapReference<Class> */ temp1 = temp1->super_class_
1829 // We do not need to emit a read barrier for the following
1830 // heap reference load, as `temp1` is only used in a
1831 // comparison with null below, and this reference is not
1832 // kept afterwards.
1833 __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
1834 __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
1835 __ Bind(&do_copy);
1836 } else {
1837 __ b(intrinsic_slow_path->GetEntryLabel(), NE);
1838 }
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001839 } else {
Roland Levillain0b671c02016-08-19 12:02:34 +01001840 // Non read barrier code.
1841
1842 // /* HeapReference<Class> */ temp1 = dest->klass_
1843 __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
1844 // /* HeapReference<Class> */ temp2 = src->klass_
1845 __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
1846 bool did_unpoison = false;
1847 if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
1848 !optimizations.GetSourceIsNonPrimitiveArray()) {
1849 // One or two of the references need to be unpoisoned. Unpoison them
1850 // both to make the identity check valid.
1851 __ MaybeUnpoisonHeapReference(temp1);
1852 __ MaybeUnpoisonHeapReference(temp2);
1853 did_unpoison = true;
1854 }
1855
1856 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
 1857       // Bail out if the destination is not a non-primitive array.
1858 // /* HeapReference<Class> */ temp3 = temp1->component_type_
1859 __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
1860 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
1861 __ MaybeUnpoisonHeapReference(temp3);
1862 // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
1863 __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
1864 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
1865 __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
1866 }
1867
1868 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
 1869       // Bail out if the source is not a non-primitive array.
1870 // /* HeapReference<Class> */ temp3 = temp2->component_type_
1871 __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
1872 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
1873 __ MaybeUnpoisonHeapReference(temp3);
1874 // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
1875 __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
1876 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
1877 __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
1878 }
1879
1880 __ cmp(temp1, ShifterOperand(temp2));
1881
1882 if (optimizations.GetDestinationIsTypedObjectArray()) {
1883 Label do_copy;
1884 __ b(&do_copy, EQ);
1885 if (!did_unpoison) {
1886 __ MaybeUnpoisonHeapReference(temp1);
1887 }
1888 // /* HeapReference<Class> */ temp1 = temp1->component_type_
1889 __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
1890 __ MaybeUnpoisonHeapReference(temp1);
1891 // /* HeapReference<Class> */ temp1 = temp1->super_class_
1892 __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
1893 // No need to unpoison the result, we're comparing against null.
1894 __ CompareAndBranchIfNonZero(temp1, intrinsic_slow_path->GetEntryLabel());
1895 __ Bind(&do_copy);
1896 } else {
1897 __ b(intrinsic_slow_path->GetEntryLabel(), NE);
1898 }
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001899 }
1900 } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
1901 DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
 1902     // Bail out if the source is not a non-primitive array.
Roland Levillain0b671c02016-08-19 12:02:34 +01001903 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1904 // /* HeapReference<Class> */ temp1 = src->klass_
1905 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1906 invoke, temp1_loc, src, class_offset, temp2_loc, /* needs_null_check */ false);
1907 // /* HeapReference<Class> */ temp3 = temp1->component_type_
1908 codegen_->GenerateFieldLoadWithBakerReadBarrier(
1909 invoke, temp3_loc, temp1, component_offset, temp2_loc, /* needs_null_check */ false);
1910 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
1911 // If heap poisoning is enabled, `temp3` has been unpoisoned
 1912       // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
1913 } else {
1914 // /* HeapReference<Class> */ temp1 = src->klass_
1915 __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
1916 __ MaybeUnpoisonHeapReference(temp1);
1917 // /* HeapReference<Class> */ temp3 = temp1->component_type_
1918 __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
1919 __ CompareAndBranchIfZero(temp3, intrinsic_slow_path->GetEntryLabel());
1920 __ MaybeUnpoisonHeapReference(temp3);
1921 }
1922 // /* uint16_t */ temp3 = static_cast<uint16>(temp3->primitive_type_);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001923 __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
1924 static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
Roland Levillain0b671c02016-08-19 12:02:34 +01001925 __ CompareAndBranchIfNonZero(temp3, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001926 }
1927
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01001928 int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
Roland Levillain0b671c02016-08-19 12:02:34 +01001929 uint32_t element_size_shift = Primitive::ComponentSizeShift(Primitive::kPrimNot);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001930 uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
Roland Levillain0b671c02016-08-19 12:02:34 +01001931
1932 // Compute the base source address in `temp1`.
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001933 if (src_pos.IsConstant()) {
1934 int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
1935 __ AddConstant(temp1, src, element_size * constant + offset);
1936 } else {
Roland Levillain0b671c02016-08-19 12:02:34 +01001937 __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, element_size_shift));
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001938 __ AddConstant(temp1, offset);
1939 }
1940
Roland Levillain0b671c02016-08-19 12:02:34 +01001941 // Compute the end source address in `temp3`.
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001942 if (length.IsConstant()) {
1943 int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
1944 __ AddConstant(temp3, temp1, element_size * constant);
1945 } else {
Roland Levillain0b671c02016-08-19 12:02:34 +01001946 __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, element_size_shift));
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001947 }
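  // `temp3` is the exclusive end address of the source region: for `length == 0` it
  // equals `temp1`, so the `temp1 != temp3` checks below skip the copy loop entirely.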
1948
Roland Levillain0b671c02016-08-19 12:02:34 +01001949 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
1950 // The base destination address is computed later, as `temp2` is
1951 // used for intermediate computations.
1952
1953 // SystemArrayCopy implementation for Baker read barriers (see
1954 // also CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier):
1955 //
1956 // if (src_ptr != end_ptr) {
1957 // uint32_t rb_state = Lockword(src->monitor_).ReadBarrierState();
1958 // lfence; // Load fence or artificial data dependency to prevent load-load reordering
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07001959 // bool is_gray = (rb_state == ReadBarrier::GrayState());
Roland Levillain0b671c02016-08-19 12:02:34 +01001960 // if (is_gray) {
1961 // // Slow-path copy.
1962 // do {
1963 // *dest_ptr++ = MaybePoison(ReadBarrier::Mark(MaybeUnpoison(*src_ptr++)));
1964 // } while (src_ptr != end_ptr)
1965 // } else {
1966 // // Fast-path copy.
1967 // do {
1968 // *dest_ptr++ = *src_ptr++;
1969 // } while (src_ptr != end_ptr)
1970 // }
1971 // }
1972
1973 Label loop, done;
1974
1975 // Don't enter copy loop if `length == 0`.
1976 __ cmp(temp1, ShifterOperand(temp3));
1977 __ b(&done, EQ);
1978
1979 // /* int32_t */ monitor = src->monitor_
1980 __ LoadFromOffset(kLoadWord, temp2, src, monitor_offset);
1981 // /* LockWord */ lock_word = LockWord(monitor)
1982 static_assert(sizeof(LockWord) == sizeof(int32_t),
1983 "art::LockWord and int32_t have different sizes.");
1984
1985 // Introduce a dependency on the lock_word including the rb_state,
1986 // which shall prevent load-load reordering without using
1987 // a memory barrier (which would be more expensive).
1988 // `src` is unchanged by this operation, but its value now depends
1989 // on `temp2`.
1990 __ add(src, src, ShifterOperand(temp2, LSR, 32));
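  // An immediate shift of LSR #32 yields 0 on ARM, so this ADD leaves `src`
  // arithmetically unchanged while still creating the data dependency on `temp2`.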
1991
1992 // Slow path used to copy array when `src` is gray.
1993 SlowPathCode* read_barrier_slow_path =
1994 new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathARM(invoke);
1995 codegen_->AddSlowPath(read_barrier_slow_path);
1996
1997 // Given the numeric representation, it's enough to check the low bit of the
1998 // rb_state. We do that by shifting the bit out of the lock word with LSRS
1999 // which can be a 16-bit instruction unlike the TST immediate.
Hiroshi Yamauchi12b58b22016-11-01 11:55:29 -07002000 static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
2001 static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
Roland Levillain0b671c02016-08-19 12:02:34 +01002002 __ Lsrs(temp2, temp2, LockWord::kReadBarrierStateShift + 1);
2003 // Carry flag is the last bit shifted out by LSRS.
2004 __ b(read_barrier_slow_path->GetEntryLabel(), CS);
2005
2006 // Fast-path copy.
2007
2008 // Compute the base destination address in `temp2`.
2009 if (dest_pos.IsConstant()) {
2010 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2011 __ AddConstant(temp2, dest, element_size * constant + offset);
2012 } else {
2013 __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, element_size_shift));
2014 __ AddConstant(temp2, offset);
2015 }
2016
2017 // Iterate over the arrays and do a raw copy of the objects. We don't need to
2018 // poison/unpoison.
2019 __ Bind(&loop);
2020 __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
2021 __ str(IP, Address(temp2, element_size, Address::PostIndex));
2022 __ cmp(temp1, ShifterOperand(temp3));
2023 __ b(&loop, NE);
2024
2025 __ Bind(read_barrier_slow_path->GetExitLabel());
2026 __ Bind(&done);
2027 } else {
2028 // Non read barrier code.
2029
2030 // Compute the base destination address in `temp2`.
2031 if (dest_pos.IsConstant()) {
2032 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2033 __ AddConstant(temp2, dest, element_size * constant + offset);
2034 } else {
2035 __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, element_size_shift));
2036 __ AddConstant(temp2, offset);
2037 }
2038
2039 // Iterate over the arrays and do a raw copy of the objects. We don't need to
2040 // poison/unpoison.
2041 Label loop, done;
2042 __ cmp(temp1, ShifterOperand(temp3));
2043 __ b(&done, EQ);
2044 __ Bind(&loop);
2045 __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
2046 __ str(IP, Address(temp2, element_size, Address::PostIndex));
2047 __ cmp(temp1, ShifterOperand(temp3));
2048 __ b(&loop, NE);
2049 __ Bind(&done);
2050 }
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01002051
2052 // We only need one card marking on the destination array.
2053 codegen_->MarkGCCard(temp1,
2054 temp2,
2055 dest,
2056 Register(kNoRegister),
Roland Levillainebea3d22016-04-12 15:42:57 +01002057 /* value_can_be_null */ false);
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01002058
Roland Levillain0b671c02016-08-19 12:02:34 +01002059 __ Bind(intrinsic_slow_path->GetExitLabel());
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01002060}
2061
Anton Kirilovd70dc9d2016-02-04 14:59:04 +00002062static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2063 // If the graph is debuggable, all callee-saved floating-point registers are blocked by
2064 // the code generator. Furthermore, the register allocator creates fixed live intervals
2065 // for all caller-saved registers because we are doing a function call. As a result, if
2066 // the input and output locations are unallocated, the register allocator runs out of
2067 // registers and fails; however, a debuggable graph is not the common case.
2068 if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
2069 return;
2070 }
2071
2072 DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
2073 DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
2074 DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);
2075
2076 LocationSummary* const locations = new (arena) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002077 LocationSummary::kCallOnMainOnly,
Anton Kirilovd70dc9d2016-02-04 14:59:04 +00002078 kIntrinsified);
2079 const InvokeRuntimeCallingConvention calling_convention;
2080
2081 locations->SetInAt(0, Location::RequiresFpuRegister());
2082 locations->SetOut(Location::RequiresFpuRegister());
2083 // Native code uses the soft float ABI.
2084 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2085 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2086}
2087
2088static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
2089 // If the graph is debuggable, all callee-saved floating-point registers are blocked by
2090 // the code generator. Furthermore, the register allocator creates fixed live intervals
2091 // for all caller-saved registers because we are doing a function call. As a result, if
2092 // the input and output locations are unallocated, the register allocator runs out of
2093 // registers and fails; however, a debuggable graph is not the common case.
2094 if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
2095 return;
2096 }
2097
2098 DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
2099 DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
2100 DCHECK_EQ(invoke->InputAt(1)->GetType(), Primitive::kPrimDouble);
2101 DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);
2102
2103 LocationSummary* const locations = new (arena) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01002104 LocationSummary::kCallOnMainOnly,
Anton Kirilovd70dc9d2016-02-04 14:59:04 +00002105 kIntrinsified);
2106 const InvokeRuntimeCallingConvention calling_convention;
2107
2108 locations->SetInAt(0, Location::RequiresFpuRegister());
2109 locations->SetInAt(1, Location::RequiresFpuRegister());
2110 locations->SetOut(Location::RequiresFpuRegister());
2111 // Native code uses the soft float ABI.
2112 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2113 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2114 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2115 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
2116}
2117
2118static void GenFPToFPCall(HInvoke* invoke,
2119 ArmAssembler* assembler,
2120 CodeGeneratorARM* codegen,
2121 QuickEntrypointEnum entry) {
2122 LocationSummary* const locations = invoke->GetLocations();
2123 const InvokeRuntimeCallingConvention calling_convention;
2124
2125 DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
2126 DCHECK(locations->WillCall() && locations->Intrinsified());
2127 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
2128 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
2129
Anton Kirilovd70dc9d2016-02-04 14:59:04 +00002130 // Native code uses the soft float ABI.
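  // vmovrrd moves the 64-bit double out of the VFP D register into the core register
  // pair (here r0/r1 per the runtime calling convention), and vmovdrr below moves the
  // result back; this matches the soft float ABI the native code expects.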
2131 __ vmovrrd(calling_convention.GetRegisterAt(0),
2132 calling_convention.GetRegisterAt(1),
2133 FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
Serban Constantinescu4bb30ac2016-06-22 17:04:45 +01002134 codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
Anton Kirilovd70dc9d2016-02-04 14:59:04 +00002135 __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
2136 calling_convention.GetRegisterAt(0),
2137 calling_convention.GetRegisterAt(1));
2138}

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmAssembler* assembler,
                            CodeGeneratorARM* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(2)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(3)));

  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ vmovrrd(calling_convention.GetRegisterAt(2),
             calling_convention.GetRegisterAt(3),
             FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()));
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

void IntrinsicLocationsBuilderARM::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rbit(out, in);
}
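
// Illustration: RBIT mirrors the full 32-bit pattern, so for example
// Integer.reverse(0x00000001) == 0x80000000 and
// Integer.reverse(0x0000000f) == 0xf0000000.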

void IntrinsicLocationsBuilderARM::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rbit(out_reg_lo, in_reg_hi);
  __ rbit(out_reg_hi, in_reg_lo);
}
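
// Worked example: for the input 0x0000000f00000000 (hi = 0x0000000f, lo = 0),
// rbit(hi) = 0xf0000000 becomes the new low word and rbit(lo) = 0 the new high
// word, so Long.reverse(...) == 0x00000000f0000000, as required.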

void IntrinsicLocationsBuilderARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rev(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rev(out_reg_lo, in_reg_hi);
  __ rev(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitShortReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ revsh(out, in);
}
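
// Illustration: REVSH byte-swaps the low halfword and sign-extends, matching
// Short.reverseBytes: 0x1234 -> 0x00003412, while 0x00ff -> 0xffffff00, i.e.
// (short) 0xff00 == -256 sign-extended into the full register.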

static void GenBitCount(HInvoke* instr, Primitive::Type type, ArmAssembler* assembler) {
  DCHECK(Primitive::IsIntOrLongType(type)) << type;
  DCHECK_EQ(instr->GetType(), Primitive::kPrimInt);
  DCHECK_EQ(Primitive::PrimitiveKind(instr->InputAt(0)->GetType()), type);

  bool is_long = type == Primitive::kPrimLong;
  LocationSummary* locations = instr->GetLocations();
  Location in = locations->InAt(0);
  Register src_0 = is_long ? in.AsRegisterPairLow<Register>() : in.AsRegister<Register>();
  Register src_1 = is_long ? in.AsRegisterPairHigh<Register>() : src_0;
  SRegister tmp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
  DRegister tmp_d = FromLowSToD(tmp_s);
  Register out_r = locations->Out().AsRegister<Register>();

  // Move data from core register(s) to temp D-reg for bit count calculation, then move back.
  // According to the Cortex-A57 and Cortex-A72 optimization guides, transferring data from a
  // core register to the upper or lower half of a VFP D-reg incurs extra latency compared to
  // transferring to the full D-reg. That's why, for integer bit count, we use
  // 'vmov d0, r0, r0' instead of 'vmov d0[0], r0'.
  __ vmovdrr(tmp_d, src_1, src_0);                         // Temp DReg |--src_1|--src_0|
  __ vcntd(tmp_d, tmp_d);                                  // Temp DReg |c|c|c|c|c|c|c|c|
  __ vpaddld(tmp_d, tmp_d, 8, /* is_unsigned */ true);     // Temp DReg |--c|--c|--c|--c|
  __ vpaddld(tmp_d, tmp_d, 16, /* is_unsigned */ true);    // Temp DReg |------c|------c|
  if (is_long) {
    __ vpaddld(tmp_d, tmp_d, 32, /* is_unsigned */ true);  // Temp DReg |--------------c|
  }
  __ vmovrs(out_r, tmp_s);
}
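
// Worked example (int case, input 0x000000ff; both D-reg words hold the input,
// so the duplicate word is simply never folded in):
//   after vmovdrr:     |000000ff|000000ff|
//   after vcntd:       |00|00|00|08|00|00|00|08|  (per-byte popcounts)
//   after vpaddld #8:  |0000|0008|0000|0008|
//   after vpaddld #16: |00000008|00000008|
// vmovrs then reads the low word, giving Integer.bitCount(0xff) == 8. For
// longs, the extra vpaddld #32 folds both words into a single 64-bit count.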

void IntrinsicLocationsBuilderARM::VisitIntegerBitCount(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
  invoke->GetLocations()->AddTemp(Location::RequiresFpuRegister());
}

void IntrinsicCodeGeneratorARM::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongBitCount(HInvoke* invoke) {
  VisitIntegerBitCount(invoke);
}

void IntrinsicCodeGeneratorARM::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(invoke, Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register num_chr = locations->GetTemp(0).AsRegister<Register>();
  Register src_ptr = locations->GetTemp(1).AsRegister<Register>();
  Register dst_ptr = locations->GetTemp(2).AsRegister<Register>();

  Label done, compressed_string_loop;
  // Compute the base address of the dst region to copy into.
  __ add(dst_ptr, dstObj, ShifterOperand(data_offset));
  __ add(dst_ptr, dst_ptr, ShifterOperand(dstBegin, LSL, 1));

  __ subs(num_chr, srcEnd, ShifterOperand(srcBegin));
  // Early out for valid zero-length retrievals.
  __ b(&done, EQ);

  // Compute the base address of the src range to copy from.
  __ add(src_ptr, srcObj, ShifterOperand(value_offset));
  Label compressed_string_preloop;
  if (mirror::kUseStringCompression) {
    // Location of count in string.
    const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
    // String's length; the compression flag is in the lowest bit of the count field.
    __ ldr(IP, Address(srcObj, count_offset));
    __ tst(IP, ShifterOperand(1));
    __ b(&compressed_string_preloop, EQ);
  }
  __ add(src_ptr, src_ptr, ShifterOperand(srcBegin, LSL, 1));

  // Do the copy.
  Label loop, remainder;

  // Avoid having to repair the value of num_chr on the < 4 character path.
  __ subs(IP, num_chr, ShifterOperand(4));
  __ b(&remainder, LT);

  // Keep the result of the earlier subs; we are going to fetch at least 4 characters.
  __ mov(num_chr, ShifterOperand(IP));

  // Main loop, used for longer fetches, loads and stores 4x16-bit characters at a time.
  // (LDRD/STRD fault on unaligned addresses and it's not worth inlining extra code
  // to rectify these everywhere this intrinsic applies.)
  __ Bind(&loop);
  __ ldr(IP, Address(src_ptr, char_size * 2));
  __ subs(num_chr, num_chr, ShifterOperand(4));
  __ str(IP, Address(dst_ptr, char_size * 2));
  __ ldr(IP, Address(src_ptr, char_size * 4, Address::PostIndex));
  __ str(IP, Address(dst_ptr, char_size * 4, Address::PostIndex));
  __ b(&loop, GE);

  __ adds(num_chr, num_chr, ShifterOperand(4));
  __ b(&done, EQ);

  // Main loop for < 4 character case and remainder handling. Loads and stores one
  // 16-bit Java character at a time.
  __ Bind(&remainder);
  __ ldrh(IP, Address(src_ptr, char_size, Address::PostIndex));
  __ subs(num_chr, num_chr, ShifterOperand(1));
  __ strh(IP, Address(dst_ptr, char_size, Address::PostIndex));
  __ b(&remainder, GT);

  if (mirror::kUseStringCompression) {
    __ b(&done);

    const size_t c_char_size = Primitive::ComponentSize(Primitive::kPrimByte);
    DCHECK_EQ(c_char_size, 1u);
    // Copy loop for compressed src, widening one 8-bit character to 16 bits at a time.
    __ Bind(&compressed_string_preloop);
    __ add(src_ptr, src_ptr, ShifterOperand(srcBegin));
    __ Bind(&compressed_string_loop);
    __ ldrb(IP, Address(src_ptr, c_char_size, Address::PostIndex));
    __ strh(IP, Address(dst_ptr, char_size, Address::PostIndex));
    __ subs(num_chr, num_chr, ShifterOperand(1));
    __ b(&compressed_string_loop, GT);
  }

  __ Bind(&done);
}
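
// Java-level sketch of what the code above implements (uncompressed case),
// assuming `value` denotes the String's backing character storage:
//   for (int i = srcBegin; i < srcEnd; ++i) {
//     dst[dstBegin + (i - srcBegin)] = value[i];
//   }
// The main loop simply performs this copy four characters (two 32-bit words)
// per iteration, with a halfword-at-a-time loop handling the remainder.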

void IntrinsicLocationsBuilderARM::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // Shifting left by 1 bit makes the value encodable as an immediate operand;
  // we don't care about the sign bit anyway.
  constexpr uint32_t infinity = kPositiveInfinityFloat << 1U;

  __ vmovrs(out, locations->InAt(0).AsFpuRegister<SRegister>());
  // We don't care about the sign bit, so shift left.
  __ Lsl(out, out, 1);
  __ eor(out, out, ShifterOperand(infinity));
  // If the result is 0, it has 32 leading zeros; any nonzero value has fewer.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}
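
// Trace (not emitted code): for x == +Inf, 0x7f800000 << 1 == 0xff000000,
// which XORs with the shifted infinity constant to 0; clz then yields 32 and
// 32 >> 5 == 1. For any non-infinite x the XOR result is nonzero, clz gives
// less than 32, and the final shift produces 0.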

void IntrinsicLocationsBuilderARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // The highest 32 bits of double precision positive infinity separated into
  // two constants encodable as immediate operands.
  constexpr uint32_t infinity_high = 0x7f000000U;
  constexpr uint32_t infinity_high2 = 0x00f00000U;

  static_assert((infinity_high | infinity_high2) ==
                    static_cast<uint32_t>(kPositiveInfinityDouble >> 32U),
                "The constants do not add up to the high 32 bits of double "
                "precision positive infinity.");
  __ vmovrrd(IP, out, FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ eor(out, out, ShifterOperand(infinity_high));
  __ eor(out, out, ShifterOperand(infinity_high2));
  // We don't care about the sign bit, so shift left.
  __ orr(out, IP, ShifterOperand(out, LSL, 1));
  // If the result is 0, it has 32 leading zeros; any nonzero value has fewer.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}
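
// Trace (not emitted code): for x == -Inf the register pair holds
// IP == 0x00000000 (low word) and out == 0xfff00000 (high word); the two EORs
// flip the exponent bits, leaving out == 0x80000000, whose sign bit the
// LSL #1 inside the ORR discards. The ORR result is 0, so clz yields 32 and
// 32 >> 5 == 1.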

void IntrinsicLocationsBuilderARM::VisitReferenceGetReferent(HInvoke* invoke) {
  if (kEmitCompilerReadBarrier) {
    // Do not intrinsify this call with the read barrier configuration.
    return;
  }
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitReferenceGetReferent(HInvoke* invoke) {
  DCHECK(!kEmitCompilerReadBarrier);
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load ArtMethod first.
  HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(invoke_direct != nullptr);
  Register temp = codegen_->GenerateCalleeMethodStaticOrDirectCall(
      invoke_direct, locations->GetTemp(0)).AsRegister<Register>();

  // Now get the declaring class.
  __ ldr(temp, Address(temp, ArtMethod::DeclaringClassOffset().Int32Value()));

  uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
  uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
  DCHECK_NE(slow_path_flag_offset, 0u);
  DCHECK_NE(disable_flag_offset, 0u);
  DCHECK_NE(slow_path_flag_offset, disable_flag_offset);

  // Check static flags that prevent using the intrinsic.
  __ ldr(IP, Address(temp, disable_flag_offset));
  __ ldr(temp, Address(temp, slow_path_flag_offset));
  __ orr(IP, IP, ShifterOperand(temp));
  __ CompareAndBranchIfNonZero(IP, slow_path->GetEntryLabel());

  // Fast path.
  __ ldr(out, Address(obj, mirror::Reference::ReferentOffset().Int32Value()));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ MaybeUnpoisonHeapReference(out);
  __ Bind(slow_path->GetExitLabel());
}
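
// In effect (a sketch of the semantics, not an authoritative contract): if
// either static flag on the declaring class is set, the call is deferred to
// the runtime's Reference.get() via the slow path; otherwise the intrinsic
// reduces to a plain load of the referent field, which is only safe because
// this intrinsic is never emitted under the read barrier configuration.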

void IntrinsicLocationsBuilderARM::VisitIntegerValueOf(HInvoke* invoke) {
  InvokeRuntimeCallingConvention calling_convention;
  IntrinsicVisitor::ComputeIntegerValueOfLocations(
      invoke,
      codegen_,
      Location::RegisterLocation(R0),
      Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void IntrinsicCodeGeneratorARM::VisitIntegerValueOf(HInvoke* invoke) {
  IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo();
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* const assembler = GetAssembler();

  Register out = locations->Out().AsRegister<Register>();
  InvokeRuntimeCallingConvention calling_convention;
  Register argument = calling_convention.GetRegisterAt(0);
  if (invoke->InputAt(0)->IsConstant()) {
    int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue();
    if (value >= info.low && value <= info.high) {
      // Just embed the j.l.Integer in the code.
      ScopedObjectAccess soa(Thread::Current());
      mirror::Object* boxed = info.cache->Get(value + (-info.low));
      DCHECK(boxed != nullptr && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(boxed));
      uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(boxed));
      __ LoadLiteral(out, codegen_->DeduplicateBootImageAddressLiteral(address));
    } else {
      // Allocate and initialize a new j.l.Integer.
      // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the
      // JIT object table.
      uint32_t address =
          dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
      __ LoadLiteral(argument, codegen_->DeduplicateBootImageAddressLiteral(address));
      codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
      CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, out, info.value_offset);
      // `value` is a final field :-( Ideally, we'd merge this memory barrier with the
      // allocation one.
      codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    }
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    // Check bounds of our cache.
    __ AddConstant(out, in, -info.low);
    __ CmpConstant(out, info.high - info.low + 1);
    Label allocate, done;
    __ b(&allocate, HS);
    // If the value is within the bounds, load the j.l.Integer directly from the array.
    uint32_t data_offset = mirror::Array::DataOffset(kHeapReferenceSize).Uint32Value();
    uint32_t address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.cache));
    __ LoadLiteral(IP, codegen_->DeduplicateBootImageAddressLiteral(data_offset + address));
    codegen_->LoadFromShiftedRegOffset(Primitive::kPrimNot, locations->Out(), IP, out);
    __ MaybeUnpoisonHeapReference(out);
    __ b(&done);
    __ Bind(&allocate);
    // Otherwise allocate and initialize a new j.l.Integer.
    address = dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(info.integer));
    __ LoadLiteral(argument, codegen_->DeduplicateBootImageAddressLiteral(address));
    codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
    CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
    __ StoreToOffset(kStoreWord, in, out, info.value_offset);
    // `value` is a final field :-( Ideally, we'd merge this memory barrier with the
    // allocation one.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kStoreStore);
    __ Bind(&done);
  }
}
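
// The generated code mirrors java.lang.Integer.valueOf (sketch; `low`, `high`
// and `cache` come from the boot image's IntegerCache, typically [-128, 127]):
//   static Integer valueOf(int i) {
//     if (i >= low && i <= high)
//       return cache[i - low];   // boxed constant from the boot image
//     return new Integer(i);     // otherwise allocate and initialize
//   }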

UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRint)
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit)

UNIMPLEMENTED_INTRINSIC(ARM, StringStringIndexOf)
UNIMPLEMENTED_INTRINSIC(ARM, StringStringIndexOfAfter)
UNIMPLEMENTED_INTRINSIC(ARM, StringBufferAppend)
UNIMPLEMENTED_INTRINSIC(ARM, StringBufferLength)
UNIMPLEMENTED_INTRINSIC(ARM, StringBufferToString)
UNIMPLEMENTED_INTRINSIC(ARM, StringBuilderAppend)
UNIMPLEMENTED_INTRINSIC(ARM, StringBuilderLength)
UNIMPLEMENTED_INTRINSIC(ARM, StringBuilderToString)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM)

#undef __

}  // namespace arm
}  // namespace art