/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

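// Shorthand used throughout the ART code generators: `__` emits through the assembler in
// scope, so the emitter bodies below read like assembly listings.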
#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

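// Common emitter for Integer/Long.numberOfLeadingZeros. For longs, CLZ the high word first
// and only fall through to the low word (adding 32) when the high word is all zeros.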
static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

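// Common emitter for Integer/Long.numberOfTrailingZeros. ARM has no count-trailing-zeros
// instruction, so reverse the bits with RBIT and count leading zeros with CLZ instead.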
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

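// Branchless integer abs: with mask = in >> 31 (arithmetic shift: all ones for negative
// inputs, all zeros otherwise), abs(in) == (in + mask) ^ mask. The 64-bit variant carries
// the low-word add into the high word with ADC.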
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}


void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

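// Integer min/max: one compare, then a Thumb-2 IT (if-then-else) block predicates the two
// moves, avoiding a branch.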
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use ldrd as addr may be unaligned.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
  // exception. So we can't use strd as addr may be unaligned.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

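// Emitter shared by the Unsafe.get* intrinsics: a plain LDR for ints, a reference load
// (routed through the read barrier machinery when enabled) for objects, and LDREXD when a
// volatile 64-bit load must be atomic but LDRD is not single-copy atomic on this CPU.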
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      __ ldr(trg, Address(base, offset));
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(
              invoke, trg_loc, base, 0U, offset_loc, TIMES_1, temp, /* needs_null_check */ false);
          if (is_volatile) {
            __ dmb(ISH);
          }
        } else {
          __ ldr(trg, Address(base, offset));
          if (is_volatile) {
            __ dmb(ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ ldr(trg, Address(base, offset));
        if (is_volatile) {
          __ dmb(ISH);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      __ add(IP, base, ShifterOperand(offset));
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
        __ ldrexd(trg_lo, trg_hi, IP);
      } else {
        __ ldrd(trg_lo, Address(IP));
      }
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

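// Emitter shared by the Unsafe.put* intrinsics. Barrier placement: a DMB ISH before the
// store for both ordered and volatile puts, plus a trailing DMB ISH for volatile puts only.
// A volatile 64-bit store falls back to an LDREXD/STREXD loop when STRD is not atomic.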
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke,
                                                Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

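// Compare-and-swap via an LDREX/STREX loop. STREX writes 0 to its status register on
// success and 1 on failure, so the loop retries only when the values matched but the
// exclusive store was interrupted; `out` is then 1 if the swap happened and 0 otherwise.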
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STXR
  // instruction below.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not poison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value_lo);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  Label loop_head;
  __ Bind(&loop_head);

  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not unpoison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    }
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS).
  //
  // TODO(rpl): Implement read barrier support in GenCAS and re-enable
  // this intrinsic.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS).
  //
  // TODO(rpl): Implement read barrier support in GenCAS and re-enable
  // this intrinsic.
  DCHECK(!kEmitCompilerReadBarrier);

  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count.
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  // the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so, unlike Quick,
  // we will not optimize the code for constants (which would save a register).
968
Andreas Gampe85b62f22015-09-09 13:15:38 -0700969 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800970 codegen_->AddSlowPath(slow_path);
971
972 __ ldr(temp, Address(obj, count_offset.Int32Value())); // temp = str.length.
973 codegen_->MaybeRecordImplicitNullCheck(invoke);
974 __ cmp(idx, ShifterOperand(temp));
975 __ b(slow_path->GetEntryLabel(), CS);
976
Jeff Hao848f70a2014-01-15 13:49:50 -0800977 __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value())); // array_temp := str.value.
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800978
979 // Load the value.
Jeff Hao848f70a2014-01-15 13:49:50 -0800980 __ ldrh(out, Address(array_temp, idx, LSL, 1)); // out := array_temp[idx].
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800981
982 __ Bind(slow_path->GetExitLabel());
983}
984
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000985void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
986 // The inputs plus one temp.
987 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Scott Wakelingc25cbf12016-04-18 09:00:11 +0100988 invoke->InputAt(1)->CanBeNull()
989 ? LocationSummary::kCallOnSlowPath
990 : LocationSummary::kNoCall,
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000991 kIntrinsified);
Scott Wakelingc25cbf12016-04-18 09:00:11 +0100992 locations->SetInAt(0, Location::RequiresRegister());
993 locations->SetInAt(1, Location::RequiresRegister());
994 locations->AddTemp(Location::RequiresRegister());
995 locations->AddTemp(Location::RequiresRegister());
996 locations->AddTemp(Location::RequiresRegister());
997 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +0000998}
999
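// String.compareTo: fast paths for reference equality and an empty shorter string, then an
// unrolled loop comparing four 16-bit chars per iteration; on a mismatching word, RBIT+CLZ
// locate the first differing char, whose difference becomes the result.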
void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp0 = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label find_char_diff;
  Label end;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take slow path and throw if input can be and is null.
  SlowPathCode* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
    codegen_->AddSlowPath(slow_path);
    __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ subs(out, str, ShifterOperand(arg));
  __ b(&end, EQ);
  // Load lengths of this and argument strings.
  __ ldr(temp2, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // out = length diff.
  __ subs(out, temp2, ShifterOperand(temp1));
  // temp0 = min(len(str), len(arg)).
  __ it(Condition::LT, kItElse);
  __ mov(temp0, ShifterOperand(temp2), Condition::LT);
  __ mov(temp0, ShifterOperand(temp1), Condition::GE);
  // Shorter string is empty?
  __ CompareAndBranchIfZero(temp0, &end);

  // Store offset of string value in preparation for comparison loop.
  __ mov(temp1, ShifterOperand(value_offset));

  // Assertions that must hold in order to compare multiple characters at a time.
  CHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment),
                "String data must be 8-byte aligned for unrolled CompareTo loop.");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Unrolled loop comparing 4x16-bit chars per iteration (ok because of string data alignment).
  __ Bind(&loop);
  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ sub(temp0, temp0, ShifterOperand(2));

  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ subs(temp0, temp0, ShifterOperand(2));

  __ b(&loop, GT);
  __ b(&end);

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ eor(temp1, temp2, ShifterOperand(IP));
  __ rbit(temp1, temp1);
  __ clz(temp1, temp1);

  // temp0 = number of 16-bit characters remaining to compare.
  // (it could be < 1 if a difference is found after the first SUB in the comparison loop, and
  // after the end of the shorter string data).

  // (temp1 >> 4) = character where difference occurs between the last two words compared, on the
  // interval [0,1] (0 for low half-word different, 1 for high half-word different).

  // If temp0 <= (temp1 >> 4), the difference occurs outside the remaining string data, so just
  // return length diff (out).
  __ cmp(temp0, ShifterOperand(temp1, LSR, 4));
  __ b(&end, LE);
  // Extract the characters and calculate the difference.
  __ bic(temp1, temp1, ShifterOperand(0xf));
  __ Lsr(temp2, temp2, temp1);
  __ Lsr(IP, IP, temp1);
  __ movt(temp2, 0);
  __ movt(IP, 0);
  __ sub(out, IP, ShifterOperand(temp2));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ CompareAndBranchIfZero(arg, &return_false);
  }

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ ldr(temp, Address(str, class_offset));
    __ ldr(temp1, Address(arg, class_offset));
    __ cmp(temp, ShifterOperand(temp1));
    __ b(&return_false, NE);
  }

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String data must be aligned for fast compare.");

  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}

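// Common code for String.indexOf: delegates to the pIndexOf quick entrypoint, which only
// handles 16-bit chars. Code points above 0xFFFF take the generic slow path; a constant
// argument that fits in a char needs no runtime check at all.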
static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
  SlowPathCode* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001237 // 0xffff is not modified immediate but 0x10000 is, so use `>= 0x10000` instead of `> 0xffff`.
1238 __ cmp(char_reg,
1239 ShifterOperand(static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1));
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001240 slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
1241 codegen->AddSlowPath(slow_path);
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001242 __ b(slow_path->GetEntryLabel(), HS);
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001243 }
1244
1245 if (start_at_zero) {
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001246 Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001247 DCHECK_EQ(tmp_reg, R2);
1248 // Start-index = 0.
1249 __ LoadImmediate(tmp_reg, 0);
1250 }
1251
1252 __ LoadFromOffset(kLoadWord, LR, TR,
1253 QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
Roland Levillain42ad2882016-02-29 18:26:54 +00001254 CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001255 __ blx(LR);
1256
1257 if (slow_path != nullptr) {
1258 __ Bind(slow_path->GetExitLabel());
1259 }
1260}

void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need to send start-index=0.
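  // The pIndexOf entrypoint takes (string, code point, start index) in R0-R2;
  // pinning the temp to the third runtime argument register lets us simply zero
  // it here (see the DCHECK_EQ(tmp_reg, R2) in GenerateVisitStringIndexOf).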
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

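  // A null byte array is not handled here; it is deferred to the slow path,
  // which re-invokes the original method.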
  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
  // TODO(rpl): Implement read barriers in the SystemArrayCopy
  // intrinsic and re-enable it (b/29516905).
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  LocationSummary* locations = invoke->GetLocations();
  if (locations == nullptr) {
    return;
  }

  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

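  // Position or length constants that cannot always be encoded as a modified
  // immediate must be materialized in a register for the comparisons emitted
  // in VisitSystemArrayCopy below.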
  if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
    locations->SetInAt(3, Location::RequiresRegister());
  }
  if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
    locations->SetInAt(4, Location::RequiresRegister());
  }
}

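// Emits a range check for one side of the copy: branches to `slow_path` unless
// 0 <= pos && pos + length <= length(input). When `length_is_input_length` is
// true, the copy is expected to cover the whole input array, so the only
// position that can succeed is zero.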
static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}

void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  // TODO(rpl): Implement read barriers in the SystemArrayCopy
  // intrinsic and re-enable it (b/29516905).
  DCHECK(!kEmitCompilerReadBarrier);

  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  if (!optimizations.GetDestinationIsSource() &&
      (!src_pos.IsConstant() || !dest_pos.IsConstant())) {
    __ cmp(src, ShifterOperand(dest));
  }
  // If source and destination are the same array, we go to the slow path unless
  // src_pos >= dest_pos: the copy loop below runs forward, which would overwrite
  // elements that have not yet been read when the destination starts after the source.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ b(&conditions_on_positions_validated, NE);
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ b(&conditions_on_positions_validated, NE);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If none of these checks succeed, we go to the
    // slow path.
    __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
    __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
    bool did_unpoison = false;
    if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
        !optimizations.GetSourceIsNonPrimitiveArray()) {
      // One or two of the references need to be unpoisoned. Unpoison them
      // both to make the identity check valid.
      __ MaybeUnpoisonHeapReference(temp1);
      __ MaybeUnpoisonHeapReference(temp2);
      did_unpoison = true;
    }

    if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
      // Bail out if the destination is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp2->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    __ cmp(temp1, ShifterOperand(temp2));

    if (optimizations.GetDestinationIsTypedObjectArray()) {
      Label do_copy;
      __ b(&do_copy, EQ);
      if (!did_unpoison) {
        __ MaybeUnpoisonHeapReference(temp1);
      }
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp1 = temp1->super_class_
      __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
      // No need to unpoison the result, we're comparing against null.
      __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
      __ Bind(&do_copy);
    } else {
      __ b(slow_path->GetEntryLabel(), NE);
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    // /* HeapReference<Class> */ temp1 = src->klass_
    __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
    __ MaybeUnpoisonHeapReference(temp1);
    // /* HeapReference<Class> */ temp3 = temp1->component_type_
    __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
    __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
    __ MaybeUnpoisonHeapReference(temp3);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
  }

  // Compute base source address, base destination address, and end source address.

  uint32_t element_size = sizeof(int32_t);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
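  // Only reference arrays reach this intrinsic, and heap references are 32 bits
  // wide here, hence the int32_t element size and the LSL #2 scaling below.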
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp1, offset);
  }

  if (dest_pos.IsConstant()) {
    int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp2, dest, element_size * constant + offset);
  } else {
    __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp2, offset);
  }

  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2));
  }

  // Iterate over the arrays and do a raw copy of the objects. We don't need to
  // poison/unpoison, nor do any read barrier as the next uses of the destination
  // array will do it.
  Label loop, done;
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&done, EQ);
  __ Bind(&loop);
  __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
  __ str(IP, Address(temp2, element_size, Address::PostIndex));
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&loop, NE);
  __ Bind(&done);

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->InputAt(1)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
}

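// The two helpers below marshal double arguments from VFP register pairs into
// core registers (R0/R1, and R2/R3 for a second argument) and the result back
// from R0/R1, since the native math entrypoints use the soft-float ABI.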
static void GenFPToFPCall(HInvoke* invoke,
                          ArmAssembler* assembler,
                          CodeGeneratorARM* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmAssembler* assembler,
                            CodeGeneratorARM* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(2)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(3)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ vmovrrd(calling_convention.GetRegisterAt(2),
             calling_convention.GetRegisterAt(3),
             FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

void IntrinsicLocationsBuilderARM::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rbit(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

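  // Reversing all 64 bits: RBIT each 32-bit half and swap the halves. The
  // locations builder requested kOutputOverlap, so the allocator keeps the
  // output pair disjoint from the input pair and the first rbit cannot
  // clobber in_reg_lo before the second one reads it.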
  __ rbit(out_reg_lo, in_reg_hi);
  __ rbit(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rev(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

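  // Same structure as LongReverse: REV byte-reverses each 32-bit half while
  // the halves themselves swap.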
  __ rev(out_reg_lo, in_reg_hi);
  __ rev(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitShortReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ revsh(out, in);
}

void IntrinsicLocationsBuilderARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register src_ptr = locations->GetTemp(0).AsRegister<Register>();
  Register src_ptr_end = locations->GetTemp(1).AsRegister<Register>();
  Register dst_ptr = locations->GetTemp(2).AsRegister<Register>();
  Register tmp = locations->GetTemp(3).AsRegister<Register>();

  // src range to copy.
  __ add(src_ptr, srcObj, ShifterOperand(value_offset));
  __ add(src_ptr_end, src_ptr, ShifterOperand(srcEnd, LSL, 1));
  __ add(src_ptr, src_ptr, ShifterOperand(srcBegin, LSL, 1));

  // dst to be copied.
  __ add(dst_ptr, dstObj, ShifterOperand(data_offset));
  __ add(dst_ptr, dst_ptr, ShifterOperand(dstBegin, LSL, 1));

  // Do the copy.
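  // The loop is pre-tested, so a zero-length range (srcBegin == srcEnd) copies
  // nothing; post-indexed halfword loads/stores advance both pointers.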
  Label loop, done;
  __ Bind(&loop);
  __ cmp(src_ptr, ShifterOperand(src_ptr_end));
  __ b(&done, EQ);
  __ ldrh(tmp, Address(src_ptr, char_size, Address::PostIndex));
  __ strh(tmp, Address(dst_ptr, char_size, Address::PostIndex));
  __ b(&loop);
  __ Bind(&done);
}

void IntrinsicLocationsBuilderARM::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // Shifting left by 1 bit makes the value encodable as an immediate operand;
  // we don't care about the sign bit anyway.
  constexpr uint32_t infinity = kPositiveInfinityFloat << 1U;
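  // kPositiveInfinityFloat is 0x7f800000, so `infinity` is 0xff000000, which is
  // a valid modified immediate (0xff ROR 8). The input is infinite iff its bits
  // shifted left by one equal that value, i.e. iff the eor below yields zero.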

  __ vmovrs(out, locations->InAt(0).AsFpuRegister<SRegister>());
  // We don't care about the sign bit, so shift left.
  __ Lsl(out, out, 1);
  __ eor(out, out, ShifterOperand(infinity));
  // If the result is 0, then it has 32 leading zeros, and less than that otherwise.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}

void IntrinsicLocationsBuilderARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // The highest 32 bits of double precision positive infinity separated into
  // two constants encodable as immediate operands.
  constexpr uint32_t infinity_high = 0x7f000000U;
  constexpr uint32_t infinity_high2 = 0x00f00000U;
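  // Together these form 0x7ff00000, the high word of positive infinity
  // (0x7ff0000000000000). That value is not itself a modified immediate, so it
  // is folded in with two eor instructions instead.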

  static_assert((infinity_high | infinity_high2) ==
                    static_cast<uint32_t>(kPositiveInfinityDouble >> 32U),
                "The constants do not add up to the high 32 bits of double "
                "precision positive infinity.");
  __ vmovrrd(IP, out, FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ eor(out, out, ShifterOperand(infinity_high));
  __ eor(out, out, ShifterOperand(infinity_high2));
  // We don't care about the sign bit, so shift left.
  __ orr(out, IP, ShifterOperand(out, LSL, 1));
  // If the result is 0, then it has 32 leading zeros, and less than that otherwise.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}

UNIMPLEMENTED_INTRINSIC(ARM, IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, LongBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRint)
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARM, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM)

#undef __

}  // namespace arm
}  // namespace art