/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
#include "code_generator_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"

namespace art {

namespace arm {

ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
  return codegen_->GetAssembler();
}

ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;

bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovrrd(output.AsRegisterPairLow<Register>(),
               output.AsRegisterPairHigh<Register>(),
               FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
               input.AsRegisterPairLow<Register>(),
               input.AsRegisterPairHigh<Register>());
  } else {
    __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
}

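// Generates CLZ-based code for Integer/Long.numberOfLeadingZeros. For a long, the result is
// composed from two 32-bit CLZ instructions: if the high word is non-zero its CLZ is the
// answer, otherwise the answer is 32 plus the CLZ of the low word.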
static void GenNumberOfLeadingZeros(LocationSummary* locations,
                                    Primitive::Type type,
                                    ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Label end;
    __ clz(out, in_reg_hi);
    __ CompareAndBranchIfNonZero(in_reg_hi, &end);
    __ clz(out, in_reg_lo);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    __ clz(out, in.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

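// Generates code for Integer/Long.numberOfTrailingZeros. RBIT reverses the bit order, which
// turns counting trailing zeros into RBIT followed by CLZ. For a long the low word is tried
// first; if it is zero, the answer is 32 plus the count from the reversed high word.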
static void GenNumberOfTrailingZeros(LocationSummary* locations,
                                     Primitive::Type type,
                                     ArmAssembler* assembler) {
  DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));

  Register out = locations->Out().AsRegister<Register>();

  if (type == Primitive::kPrimLong) {
    Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
    Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
    Label end;
    __ rbit(out, in_reg_lo);
    __ clz(out, out);
    __ CompareAndBranchIfNonZero(in_reg_lo, &end);
    __ rbit(out, in_reg_hi);
    __ clz(out, out);
    __ AddConstant(out, 32);
    __ Bind(&end);
  } else {
    Register in = locations->InAt(0).AsRegister<Register>();
    __ rbit(out, in);
    __ clz(out, out);
  }
}

void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location out = locations->Out();

  if (is64bit) {
    __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
             FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
  } else {
    __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  locations->AddTemp(Location::RequiresRegister());
}

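// Branchless absolute value: mask = in >> 31 (arithmetic shift, so mask is 0 or -1), then
// out = (in + mask) ^ mask. For the 64-bit case the addition is propagated with adds/adc.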
static void GenAbsInteger(LocationSummary* locations,
                          bool is64bit,
                          ArmAssembler* assembler) {
  Location in = locations->InAt(0);
  Location output = locations->Out();

  Register mask = locations->GetTemp(0).AsRegister<Register>();

  if (is64bit) {
    Register in_reg_lo = in.AsRegisterPairLow<Register>();
    Register in_reg_hi = in.AsRegisterPairHigh<Register>();
    Register out_reg_lo = output.AsRegisterPairLow<Register>();
    Register out_reg_hi = output.AsRegisterPairHigh<Register>();

    DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";

    __ Asr(mask, in_reg_hi, 31);
    __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
    __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
    __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
    __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
  } else {
    Register in_reg = in.AsRegister<Register>();
    Register out_reg = output.AsRegister<Register>();

    __ Asr(mask, in_reg, 31);
    __ add(out_reg, in_reg, ShifterOperand(mask));
    __ eor(out_reg, mask, ShifterOperand(out_reg));
  }
}

void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
  CreateIntToIntPlusTemp(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

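// Branchless min/max: compare the operands, then use a Thumb-2 IT-else block to predicate the
// two moves, so exactly one of op1/op2 is selected without a branch.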
static void GenMinMax(LocationSummary* locations,
                      bool is_min,
                      ArmAssembler* assembler) {
  Register op1 = locations->InAt(0).AsRegister<Register>();
  Register op2 = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  __ cmp(op1, ShifterOperand(op2));

  __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
  __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
  __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
}

void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = GetAssembler();
  __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
            FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // LDRD requires a word-aligned address and throws a processor exception otherwise (even with
  // control register bit SCTLR.A = 0, which permits unaligned LDR). As addr may be unaligned,
  // load the two halves with plain LDR.
  Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
  Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
  if (addr == lo) {
    // The address register aliases the low output register; load the high word first so the
    // address is not clobbered.
    __ ldr(hi, Address(addr, 4));
    __ ldr(lo, Address(addr, 0));
  } else {
    __ ldr(lo, Address(addr, 0));
    __ ldr(hi, Address(addr, 4));
  }
}

void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
           Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
         Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  // Ignore upper 4B of long address.
  Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
  // STRD requires a word-aligned address and throws a processor exception otherwise (even with
  // control register bit SCTLR.A = 0, which permits unaligned STR). As addr may be unaligned,
  // store the two halves with plain STR.
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
  __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
}

void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateIntIntToVoidLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
          Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
}

void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  __ LoadFromOffset(kLoadWord,
                    invoke->GetLocations()->Out().AsRegister<Register>(),
                    TR,
                    Thread::PeerOffset<kArmPointerSize>().Int32Value());
}

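// Generates an Unsafe.get* load. Volatile loads are given acquire semantics by issuing a
// DMB ISH after the load; 64-bit volatile loads on cores without single-copy-atomic LDRD
// use LDREXD to keep the load atomic.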
static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorARM* codegen) {
  LocationSummary* locations = invoke->GetLocations();
  ArmAssembler* assembler = codegen->GetAssembler();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();             // Object pointer.
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Location trg_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt: {
      Register trg = trg_loc.AsRegister<Register>();
      __ ldr(trg, Address(base, offset));
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    case Primitive::kPrimNot: {
      Register trg = trg_loc.AsRegister<Register>();
      if (kEmitCompilerReadBarrier) {
        if (kUseBakerReadBarrier) {
          Location temp = locations->GetTemp(0);
          codegen->GenerateReferenceLoadWithBakerReadBarrier(
              invoke, trg_loc, base, 0U, offset_loc, TIMES_1, temp, /* needs_null_check */ false);
          if (is_volatile) {
            __ dmb(ISH);
          }
        } else {
          __ ldr(trg, Address(base, offset));
          if (is_volatile) {
            __ dmb(ISH);
          }
          codegen->GenerateReadBarrierSlow(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
        }
      } else {
        __ ldr(trg, Address(base, offset));
        if (is_volatile) {
          __ dmb(ISH);
        }
        __ MaybeUnpoisonHeapReference(trg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
      __ add(IP, base, ShifterOperand(offset));
      if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
        Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
        __ ldrexd(trg_lo, trg_hi, IP);
      } else {
        __ ldrd(trg_lo, Address(IP));
      }
      if (is_volatile) {
        __ dmb(ISH);
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected type " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
                                          HInvoke* invoke,
                                          Primitive::Type type) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
  if (type == Primitive::kPrimNot && kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // We need a temporary register for the read barrier marking slow
    // path in CodeGeneratorARM::GenerateReferenceLoadWithBakerReadBarrier.
    locations->AddTemp(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot);
}

void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
                                     const ArmInstructionSetFeatures& features,
                                     Primitive::Type type,
                                     bool is_volatile,
                                     HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());

  if (type == Primitive::kPrimLong) {
    // Potentially need temps for ldrexd-strexd loop.
    if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
      locations->AddTemp(Location::RequiresRegister());  // Temp_lo.
      locations->AddTemp(Location::RequiresRegister());  // Temp_hi.
    }
  } else if (type == Primitive::kPrimNot) {
    // Temps for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Temp.
    locations->AddTemp(Location::RequiresRegister());  // Card.
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}

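// Generates an Unsafe.put* store. Volatile and ordered stores get release semantics from the
// DMB ISH issued before the store; volatile stores add a trailing DMB ISH. A 64-bit volatile
// store on cores without single-copy-atomic STRD uses an LDREXD/STREXD loop so the store stays
// atomic.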
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}

void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}

static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
                                                HInvoke* invoke,
                                                Primitive::Type type) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());        // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // If heap poisoning is enabled, we don't want the unpoisoning
  // operations to potentially clobber the output.
  Location::OutputOverlap overlaps = (kPoisonHeapReferences && type == Primitive::kPrimNot)
      ? Location::kOutputOverlap
      : Location::kNoOutputOverlap;
  locations->SetOut(Location::RequiresRegister(), overlaps);

  locations->AddTemp(Location::RequiresRegister());  // Pointer.
  locations->AddTemp(Location::RequiresRegister());  // Temp 1.
}

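// Generates an Unsafe.compareAndSwap* via an LDREX/STREX retry loop: load-exclusive the current
// value, compare it with the expected value, and only store-exclusive the new value on a match;
// a failed store-exclusive (another writer intervened) retries the loop.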
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  // Emit a DMB ISH instruction instead of a DMB ISHST one, as the
  // latter allows a preceding load to be delayed past the STREX
  // instruction below.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not poison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been poisoned.
    } else {
      codegen->GetAssembler()->PoisonHeapReference(value_lo);
    }
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp == 0;

  Label loop_head;
  __ Bind(&loop_head);

  // TODO: When `type == Primitive::kPrimNot`, add a read barrier for
  // the reference stored in the object before attempting the CAS,
  // similar to the one in the art::Unsafe_compareAndSwapObject JNI
  // implementation.
  //
  // Note that this code is not (yet) used when read barriers are
  // enabled (see IntrinsicLocationsBuilderARM::VisitUnsafeCASObject).
  DCHECK(!(type == Primitive::kPrimNot && kEmitCompilerReadBarrier));
  __ ldrex(tmp_lo, tmp_ptr);

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

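  // Materialize the boolean result: rsbs computes out = 1 - tmp_lo, and the borrow (carry
  // clear, CC) taken for tmp_lo > 1 forces out to 0, so out = (tmp_lo == 0).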
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
    if (value_lo == expected_lo) {
      // Do not unpoison `value_lo`, as it is the same register as
      // `expected_lo`, which has just been unpoisoned.
    } else {
      codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    }
  }
}

void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimInt);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS).
  //
  // TODO(rpl): Implement read barrier support in GenCAS and re-enable
  // this intrinsic.
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke, Primitive::kPrimNot);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic is missing a read barrier, and
  // therefore sometimes does not work as expected (b/25883050).
  // Turn it off temporarily as a quick fix, until the read barrier is
  // implemented (see TODO in GenCAS).
  //
  // TODO(rpl): Implement read barrier support in GenCAS and re-enable
  // this intrinsic.
  DCHECK(!kEmitCompilerReadBarrier);

  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}

void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus three temps.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            invoke->InputAt(1)->CanBeNull()
                                                                ? LocationSummary::kCallOnSlowPath
                                                                : LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp0 = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label find_char_diff;
  Label end;

  // Get offsets of count and value fields within a string object.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Take slow path and throw if input can be and is null.
  SlowPathCode* slow_path = nullptr;
  const bool can_slow_path = invoke->InputAt(1)->CanBeNull();
  if (can_slow_path) {
    slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
    codegen_->AddSlowPath(slow_path);
    __ CompareAndBranchIfZero(arg, slow_path->GetEntryLabel());
  }

  // Reference equality check, return 0 if same reference.
  __ subs(out, str, ShifterOperand(arg));
  __ b(&end, EQ);
  // Load lengths of this and argument strings.
  __ ldr(temp2, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // out = length diff.
  __ subs(out, temp2, ShifterOperand(temp1));
  // temp0 = min(len(str), len(arg)).
  __ it(Condition::LT, kItElse);
  __ mov(temp0, ShifterOperand(temp2), Condition::LT);
  __ mov(temp0, ShifterOperand(temp1), Condition::GE);
  // Shorter string is empty?
  __ CompareAndBranchIfZero(temp0, &end);

  // Store offset of string value in preparation for comparison loop.
  __ mov(temp1, ShifterOperand(value_offset));

  // Assertions that must hold in order to compare multiple characters at a time.
  CHECK_ALIGNED(value_offset, 8);
  static_assert(IsAligned<8>(kObjectAlignment),
                "String data must be 8-byte aligned for unrolled CompareTo loop.");

  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Unrolled loop comparing 4x16-bit chars per iteration (ok because of string data alignment).
  __ Bind(&loop);
  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ sub(temp0, temp0, ShifterOperand(2));

  __ ldr(IP, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(IP, ShifterOperand(temp2));
  __ b(&find_char_diff, NE);
  __ add(temp1, temp1, ShifterOperand(char_size * 2));
  __ subs(temp0, temp0, ShifterOperand(2));

  __ b(&loop, GT);
  __ b(&end);

  // Find the single 16-bit character difference.
  __ Bind(&find_char_diff);
  // Get the bit position of the first character that differs.
  __ eor(temp1, temp2, ShifterOperand(IP));
  __ rbit(temp1, temp1);
  __ clz(temp1, temp1);

  // temp0 = number of 16-bit characters remaining to compare.
  // (it could be < 1 if a difference is found after the first SUB in the comparison loop, and
  // after the end of the shorter string data).

  // (temp1 >> 4) = character where difference occurs between the last two words compared, on the
  // interval [0,1] (0 for low half-word different, 1 for high half-word different).

  // If temp0 <= (temp1 >> 4), the difference occurs outside the remaining string data, so just
  // return length diff (out).
  __ cmp(temp0, ShifterOperand(temp1, LSR, 4));
  __ b(&end, LE);
  // Extract the characters and calculate the difference.
  __ bic(temp1, temp1, ShifterOperand(0xf));
  __ Lsr(temp2, temp2, temp1);
  __ Lsr(IP, IP, temp1);
  __ movt(temp2, 0);
  __ movt(IP, 0);
  __ sub(out, IP, ShifterOperand(temp2));

  __ Bind(&end);

  if (can_slow_path) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers to store lengths of strings and for calculations.
  // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
  locations->AddTemp(Location::RegisterLocation(R0));
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());

  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ CompareAndBranchIfZero(arg, &return_false);
  }

  if (!optimizations.GetArgumentIsString()) {
    // Instanceof check for the argument by comparing class fields.
    // All string objects must have the same type since String cannot be subclassed.
    // Receiver must be a string object, so its class field is equal to all strings' class fields.
    // If the argument is a string object, its class field must be equal to receiver's class field.
    __ ldr(temp, Address(str, class_offset));
    __ ldr(temp1, Address(arg, class_offset));
    __ cmp(temp, ShifterOperand(temp1));
    __ b(&return_false, NE);
  }

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String data must be aligned for fast compare.");

  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}

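// Common code for the String.indexOf intrinsics: code points above 0xFFFF (outside the BMP)
// take the slow path; everything else calls the pIndexOf runtime entrypoint, whose hand-crafted
// stub follows the runtime calling convention.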
static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch for a large constant, or omit the slow-path for a small constant or a
  // char.
  SlowPathCode* slow_path = nullptr;
  HInstruction* code_point = invoke->InputAt(1);
  if (code_point->IsIntConstant()) {
    if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else if (code_point->GetType() != Primitive::kPrimChar) {
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    // 0xffff is not a modified immediate, but 0x10000 is, so use `>= 0x10000` instead of
    // `> 0xffff`.
    __ cmp(char_reg,
           ShifterOperand(static_cast<uint32_t>(std::numeric_limits<uint16_t>::max()) + 1));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HS);
  }

  if (start_at_zero) {
    Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  CheckEntrypointTypes<kQuickIndexOf, int32_t, void*, uint32_t, uint32_t>();
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

1213void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
1214 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1215 LocationSummary::kCall,
1216 kIntrinsified);
1217 // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1218 // best to align the inputs accordingly.
1219 InvokeRuntimeCallingConvention calling_convention;
1220 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1221 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1222 locations->SetOut(Location::RegisterLocation(R0));
1223
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001224 // Need to send start-index=0.
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001225 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1226}
1227
1228void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001229 GenerateVisitStringIndexOf(
1230 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001231}
1232
1233void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
1234 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1235 LocationSummary::kCall,
1236 kIntrinsified);
1237 // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
1238 // best to align the inputs accordingly.
1239 InvokeRuntimeCallingConvention calling_convention;
1240 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1241 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1242 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1243 locations->SetOut(Location::RegisterLocation(R0));
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001244}
1245
1246void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001247 GenerateVisitStringIndexOf(
1248 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Andreas Gampeba6fdbc2015-05-07 22:31:55 -07001249}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  // No need to emit code checking whether `locations->InAt(2)` is a null
  // pointer, as callers of the native method
  //
  //   java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
  //
  // all include a null check on `data` before calling that method.
  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
  __ blx(LR);
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
  // TODO(rpl): Implement read barriers in the SystemArrayCopy
  // intrinsic and re-enable it (b/29516905).
  if (kEmitCompilerReadBarrier) {
    return;
  }

  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  LocationSummary* locations = invoke->GetLocations();
  if (locations == nullptr) {
    return;
  }

  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
    locations->SetInAt(3, Location::RequiresRegister());
  }
  if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
    locations->SetInAt(4, Location::RequiresRegister());
  }
}

static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
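  // Verifies that [pos, pos + length) lies within the bounds of `input`, branching to
  // `slow_path` otherwise. `input_len` and `temp` are scratch registers.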
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}

void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  // TODO(rpl): Implement read barriers in the SystemArrayCopy
  // intrinsic and re-enable it (b/29516905).
  DCHECK(!kEmitCompilerReadBarrier);

  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  Label conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same array, we go to the slow path when the copy
  // would have to run backwards, i.e. when dest_pos > src_pos, as the forward word copy
  // below would otherwise overwrite source elements before reading them.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ cmp(src, ShifterOperand(dest));
        __ b(slow_path->GetEntryLabel(), EQ);
      }

      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ cmp(src, ShifterOperand(dest));
        __ b(&conditions_on_positions_validated, NE);
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ cmp(src, ShifterOperand(dest));
      __ b(&conditions_on_positions_validated, NE);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If none of these checks succeed, we go to the
    // slow path.
    __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
    __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
    bool did_unpoison = false;
    if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
        !optimizations.GetSourceIsNonPrimitiveArray()) {
      // One or two of the references need to be unpoisoned. Unpoison them
      // both to make the identity check valid.
      __ MaybeUnpoisonHeapReference(temp1);
      __ MaybeUnpoisonHeapReference(temp2);
      did_unpoison = true;
    }

    if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
      // Bail out if the destination is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      // Bail out if the source is not a non-primitive array.
      // /* HeapReference<Class> */ temp3 = temp2->component_type_
      __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    __ cmp(temp1, ShifterOperand(temp2));

    if (optimizations.GetDestinationIsTypedObjectArray()) {
      Label do_copy;
      __ b(&do_copy, EQ);
      if (!did_unpoison) {
        __ MaybeUnpoisonHeapReference(temp1);
      }
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp1 = temp1->super_class_
      __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
      // No need to unpoison the result, we're comparing against null.
      __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
      __ Bind(&do_copy);
    } else {
      __ b(slow_path->GetEntryLabel(), NE);
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    // /* HeapReference<Class> */ temp1 = src->klass_
    __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
    __ MaybeUnpoisonHeapReference(temp1);
    // /* HeapReference<Class> */ temp3 = temp1->component_type_
    __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
    __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
    __ MaybeUnpoisonHeapReference(temp3);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
  }

  // Compute base source address, base destination address, and end source address.

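  // In pseudo-code, with `offset` being the array data offset and element_size == 4:
  //   temp1 = src + src_pos * element_size + offset    // base source address
  //   temp2 = dest + dest_pos * element_size + offset  // base destination address
  //   temp3 = temp1 + length * element_size            // end source address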
  uint32_t element_size = sizeof(int32_t);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp1, offset);
  }

  if (dest_pos.IsConstant()) {
    int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp2, dest, element_size * constant + offset);
  } else {
    __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp2, offset);
  }

  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2));
  }

  // Iterate over the arrays and do a raw copy of the objects. We don't need to
  // poison/unpoison, nor do any read barrier as the next uses of the destination
  // array will do it.
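  // The loop below moves one 32-bit word (one reference) per iteration through IP,
  // post-incrementing both pointers, until temp1 reaches the end address in temp3.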
  Label loop, done;
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&done, EQ);
  __ Bind(&loop);
  __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
  __ str(IP, Address(temp2, element_size, Address::PostIndex));
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&loop, NE);
  __ Bind(&done);

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* value_can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena, HInvoke* invoke) {
  // If the graph is debuggable, all callee-saved floating-point registers are blocked by
  // the code generator. Furthermore, the register allocator creates fixed live intervals
  // for all caller-saved registers because we are doing a function call. As a result, if
  // the input and output locations are unallocated, the register allocator runs out of
  // registers and fails; however, a debuggable graph is not the common case.
  if (invoke->GetBlock()->GetGraph()->IsDebuggable()) {
    return;
  }

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK_EQ(invoke->InputAt(0)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->InputAt(1)->GetType(), Primitive::kPrimDouble);
  DCHECK_EQ(invoke->GetType(), Primitive::kPrimDouble);

  LocationSummary* const locations = new (arena) LocationSummary(invoke,
                                                                 LocationSummary::kCall,
                                                                 kIntrinsified);
  const InvokeRuntimeCallingConvention calling_convention;

  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  // Native code uses the soft float ABI.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
}

static void GenFPToFPCall(HInvoke* invoke,
                          ArmAssembler* assembler,
                          CodeGeneratorARM* codegen,
                          QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 1U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
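  // Move the double argument from its D register into the core register pair expected by
  // the soft-float convention, call through LR, then move the result back the same way.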
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

static void GenFPFPToFPCall(HInvoke* invoke,
                            ArmAssembler* assembler,
                            CodeGeneratorARM* codegen,
                            QuickEntrypointEnum entry) {
  LocationSummary* const locations = invoke->GetLocations();
  const InvokeRuntimeCallingConvention calling_convention;

  DCHECK_EQ(invoke->GetNumberOfArguments(), 2U);
  DCHECK(locations->WillCall() && locations->Intrinsified());
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(0)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(1)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(2)));
  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(calling_convention.GetRegisterAt(3)));

  __ LoadFromOffset(kLoadWord, LR, TR, GetThreadOffset<kArmWordSize>(entry).Int32Value());
  // Native code uses the soft float ABI.
  __ vmovrrd(calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1),
             FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ vmovrrd(calling_convention.GetRegisterAt(2),
             calling_convention.GetRegisterAt(3),
             FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()));
  __ blx(LR);
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
  __ vmovdrr(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
             calling_convention.GetRegisterAt(0),
             calling_convention.GetRegisterAt(1));
}

void IntrinsicLocationsBuilderARM::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderARM::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderARM::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderARM::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderARM::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderARM::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderARM::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderARM::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderARM::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderARM::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderARM::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderARM::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderARM::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, GetAssembler(), codegen_, kQuickTanh);
}

void IntrinsicLocationsBuilderARM::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathAtan2(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderARM::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathHypot(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderARM::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitMathNextAfter(HInvoke* invoke) {
  GenFPFPToFPCall(invoke, GetAssembler(), codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverse(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rbit(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverse(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

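  // Bit-reversing a 64-bit value also swaps its two words: the reversed high input word
  // becomes the low output word and vice versa.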
  __ rbit(out_reg_lo, in_reg_hi);
  __ rbit(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitIntegerReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ rev(out, in);
}

void IntrinsicLocationsBuilderARM::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorARM::VisitLongReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
  Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
  Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();

  __ rev(out_reg_lo, in_reg_hi);
  __ rev(out_reg_hi, in_reg_lo);
}

void IntrinsicLocationsBuilderARM::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitShortReverseBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register out = locations->Out().AsRegister<Register>();
  Register in = locations->InAt(0).AsRegister<Register>();

  __ revsh(out, in);
}

void IntrinsicLocationsBuilderARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // Temporary registers to store lengths of strings and for calculations.
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Check the assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Location of data in the char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  // Location of the char array data in the string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // void getCharsNoCheck(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  // Since getChars() calls getCharsNoCheck(), we use registers rather than constants.
  Register srcObj = locations->InAt(0).AsRegister<Register>();
  Register srcBegin = locations->InAt(1).AsRegister<Register>();
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dstObj = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  Register num_chr = locations->GetTemp(0).AsRegister<Register>();
  Register src_ptr = locations->GetTemp(1).AsRegister<Register>();
  Register dst_ptr = locations->GetTemp(2).AsRegister<Register>();

  // Compute the start of the source range to copy.
  __ add(src_ptr, srcObj, ShifterOperand(value_offset));
  __ add(src_ptr, src_ptr, ShifterOperand(srcBegin, LSL, 1));

  // Compute the start of the destination to be copied to.
  __ add(dst_ptr, dstObj, ShifterOperand(data_offset));
  __ add(dst_ptr, dst_ptr, ShifterOperand(dstBegin, LSL, 1));

  __ subs(num_chr, srcEnd, ShifterOperand(srcBegin));

  // Do the copy.
  Label loop, remainder, done;

  // Early out for valid zero-length retrievals.
  __ b(&done, EQ);

  // This saves having to repair the value of num_chr on the < 4 character path.
  __ subs(IP, num_chr, ShifterOperand(4));
  __ b(&remainder, LT);

  // Keep the result of the earlier subs; we are going to fetch at least 4 characters.
  __ mov(num_chr, ShifterOperand(IP));

  // The main loop, used for longer fetches, loads and stores 4 x 16-bit characters at a time.
  // (LDRD/STRD fault on unaligned addresses and it's not worth inlining extra code
  // to rectify these everywhere this intrinsic applies.)
  __ Bind(&loop);
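  // The first ldr/str pair copies chars 2..3 at offset 4 without writeback; the second
  // copies chars 0..1 and post-increments both pointers by 8 bytes (4 chars). The subs
  // in between updates the remaining character count.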
  __ ldr(IP, Address(src_ptr, char_size * 2));
  __ subs(num_chr, num_chr, ShifterOperand(4));
  __ str(IP, Address(dst_ptr, char_size * 2));
  __ ldr(IP, Address(src_ptr, char_size * 4, Address::PostIndex));
  __ str(IP, Address(dst_ptr, char_size * 4, Address::PostIndex));
  __ b(&loop, GE);

  __ adds(num_chr, num_chr, ShifterOperand(4));
  __ b(&done, EQ);

  // Main loop for < 4 character case and remainder handling. Loads and stores one
  // 16-bit Java character at a time.
  __ Bind(&remainder);
  __ ldrh(IP, Address(src_ptr, char_size, Address::PostIndex));
  __ subs(num_chr, num_chr, ShifterOperand(1));
  __ strh(IP, Address(dst_ptr, char_size, Address::PostIndex));
  __ b(&remainder, GT);

  __ Bind(&done);
}

void IntrinsicLocationsBuilderARM::VisitFloatIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitFloatIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // Shifting left by 1 bit makes the value encodable as an immediate operand;
  // we don't care about the sign bit anyway.
  constexpr uint32_t infinity = kPositiveInfinityFloat << 1U;
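  // For example, +Inf is 0x7f800000 in IEEE 754 single precision, so `infinity` is
  // 0xff000000; -Inf (0xff800000) shifts to the same pattern, which is why both signs
  // are matched by the eor/clz sequence below.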

  __ vmovrs(out, locations->InAt(0).AsFpuRegister<SRegister>());
  // We don't care about the sign bit, so shift left.
  __ Lsl(out, out, 1);
  __ eor(out, out, ShifterOperand(infinity));
  // If the result is 0, then it has 32 leading zeros, and less than that otherwise.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}

void IntrinsicLocationsBuilderARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorARM::VisitDoubleIsInfinite(HInvoke* invoke) {
  ArmAssembler* const assembler = GetAssembler();
  LocationSummary* const locations = invoke->GetLocations();
  const Register out = locations->Out().AsRegister<Register>();
  // The highest 32 bits of double precision positive infinity separated into
  // two constants encodable as immediate operands.
  constexpr uint32_t infinity_high = 0x7f000000U;
  constexpr uint32_t infinity_high2 = 0x00f00000U;

  static_assert((infinity_high | infinity_high2) ==
                    static_cast<uint32_t>(kPositiveInfinityDouble >> 32U),
                "The constants do not add up to the high 32 bits of double "
                "precision positive infinity.");
  __ vmovrrd(IP, out, FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
  __ eor(out, out, ShifterOperand(infinity_high));
  __ eor(out, out, ShifterOperand(infinity_high2));
  // We don't care about the sign bit, so shift left.
  __ orr(out, IP, ShifterOperand(out, LSL, 1));
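  // At this point `out` is zero iff the low word (in IP) is zero and the high word, with
  // its sign bit discarded by the shift, equals 0x7ff00000, i.e. iff the input is positive
  // or negative infinity.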
  // If the result is 0, then it has 32 leading zeros, and less than that otherwise.
  __ clz(out, out);
  // Any number less than 32 logically shifted right by 5 bits results in 0;
  // the same operation on 32 yields 1.
  __ Lsr(out, out, 5);
}

UNIMPLEMENTED_INTRINSIC(ARM, IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, LongBitCount)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(ARM, MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRint)
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat)    // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ARM, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(ARM)

#undef __

}  // namespace arm
}  // namespace art