blob: 9820c5de0aadff8b5ceebf62bdd0703f65ab3765 [file] [log] [blame]
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "intrinsics_arm.h"
18
19#include "arch/arm/instruction_set_features_arm.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method.h"
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080021#include "code_generator_arm.h"
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "intrinsics.h"
Andreas Gampe85b62f22015-09-09 13:15:38 -070024#include "intrinsics_utils.h"
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080025#include "mirror/array-inl.h"
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080026#include "mirror/string.h"
27#include "thread.h"
28#include "utils/arm/assembler_arm.h"
29
30namespace art {
31
32namespace arm {
33
34ArmAssembler* IntrinsicCodeGeneratorARM::GetAssembler() {
35 return codegen_->GetAssembler();
36}
37
38ArenaAllocator* IntrinsicCodeGeneratorARM::GetAllocator() {
39 return codegen_->GetGraph()->GetArena();
40}
41
Andreas Gampe85b62f22015-09-09 13:15:38 -070042using IntrinsicSlowPathARM = IntrinsicSlowPath<InvokeDexCallingConventionVisitorARM>;
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080043
44bool IntrinsicLocationsBuilderARM::TryDispatch(HInvoke* invoke) {
45 Dispatch(invoke);
46 LocationSummary* res = invoke->GetLocations();
Roland Levillain3b359c72015-11-17 19:35:12 +000047 if (res == nullptr) {
48 return false;
49 }
50 if (kEmitCompilerReadBarrier && res->CanCall()) {
51 // Generating an intrinsic for this HInvoke may produce an
52 // IntrinsicSlowPathARM slow path. Currently this approach
53 // does not work when using read barriers, as the emitted
54 // calling sequence will make use of another slow path
55 // (ReadBarrierForRootSlowPathARM for HInvokeStaticOrDirect,
56 // ReadBarrierSlowPathARM for HInvokeVirtual). So we bail
57 // out in this case.
58 //
59 // TODO: Find a way to have intrinsics work with read barriers.
60 invoke->SetLocations(nullptr);
61 return false;
62 }
63 return res->Intrinsified();
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -080064}
65
66#define __ assembler->
67
68static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
69 LocationSummary* locations = new (arena) LocationSummary(invoke,
70 LocationSummary::kNoCall,
71 kIntrinsified);
72 locations->SetInAt(0, Location::RequiresFpuRegister());
73 locations->SetOut(Location::RequiresRegister());
74}
75
76static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
77 LocationSummary* locations = new (arena) LocationSummary(invoke,
78 LocationSummary::kNoCall,
79 kIntrinsified);
80 locations->SetInAt(0, Location::RequiresRegister());
81 locations->SetOut(Location::RequiresFpuRegister());
82}
83
84static void MoveFPToInt(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
85 Location input = locations->InAt(0);
86 Location output = locations->Out();
87 if (is64bit) {
88 __ vmovrrd(output.AsRegisterPairLow<Register>(),
89 output.AsRegisterPairHigh<Register>(),
90 FromLowSToD(input.AsFpuRegisterPairLow<SRegister>()));
91 } else {
92 __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>());
93 }
94}
95
96static void MoveIntToFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
97 Location input = locations->InAt(0);
98 Location output = locations->Out();
99 if (is64bit) {
100 __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()),
101 input.AsRegisterPairLow<Register>(),
102 input.AsRegisterPairHigh<Register>());
103 } else {
104 __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>());
105 }
106}
107
108void IntrinsicLocationsBuilderARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
109 CreateFPToIntLocations(arena_, invoke);
110}
111void IntrinsicLocationsBuilderARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
112 CreateIntToFPLocations(arena_, invoke);
113}
114
115void IntrinsicCodeGeneratorARM::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000116 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800117}
118void IntrinsicCodeGeneratorARM::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000119 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800120}
121
122void IntrinsicLocationsBuilderARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
123 CreateFPToIntLocations(arena_, invoke);
124}
125void IntrinsicLocationsBuilderARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
126 CreateIntToFPLocations(arena_, invoke);
127}
128
129void IntrinsicCodeGeneratorARM::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000130 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800131}
132void IntrinsicCodeGeneratorARM::VisitFloatIntBitsToFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000133 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800134}
135
136static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
137 LocationSummary* locations = new (arena) LocationSummary(invoke,
138 LocationSummary::kNoCall,
139 kIntrinsified);
140 locations->SetInAt(0, Location::RequiresRegister());
141 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
142}
143
144static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
145 LocationSummary* locations = new (arena) LocationSummary(invoke,
146 LocationSummary::kNoCall,
147 kIntrinsified);
148 locations->SetInAt(0, Location::RequiresFpuRegister());
149 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
150}
151
Scott Wakeling611d3392015-07-10 11:42:06 +0100152static void GenNumberOfLeadingZeros(LocationSummary* locations,
153 Primitive::Type type,
154 ArmAssembler* assembler) {
155 Location in = locations->InAt(0);
156 Register out = locations->Out().AsRegister<Register>();
157
158 DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));
159
160 if (type == Primitive::kPrimLong) {
161 Register in_reg_lo = in.AsRegisterPairLow<Register>();
162 Register in_reg_hi = in.AsRegisterPairHigh<Register>();
163 Label end;
164 __ clz(out, in_reg_hi);
165 __ CompareAndBranchIfNonZero(in_reg_hi, &end);
166 __ clz(out, in_reg_lo);
167 __ AddConstant(out, 32);
168 __ Bind(&end);
169 } else {
170 __ clz(out, in.AsRegister<Register>());
171 }
172}
173
174void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
175 CreateIntToIntLocations(arena_, invoke);
176}
177
178void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
179 GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
180}
181
182void IntrinsicLocationsBuilderARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
183 LocationSummary* locations = new (arena_) LocationSummary(invoke,
184 LocationSummary::kNoCall,
185 kIntrinsified);
186 locations->SetInAt(0, Location::RequiresRegister());
187 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
188}
189
190void IntrinsicCodeGeneratorARM::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
191 GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
192}
193
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100194static void GenNumberOfTrailingZeros(LocationSummary* locations,
195 Primitive::Type type,
196 ArmAssembler* assembler) {
197 DCHECK((type == Primitive::kPrimInt) || (type == Primitive::kPrimLong));
198
199 Register out = locations->Out().AsRegister<Register>();
200
201 if (type == Primitive::kPrimLong) {
202 Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
203 Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
204 Label end;
205 __ rbit(out, in_reg_lo);
206 __ clz(out, out);
207 __ CompareAndBranchIfNonZero(in_reg_lo, &end);
208 __ rbit(out, in_reg_hi);
209 __ clz(out, out);
210 __ AddConstant(out, 32);
211 __ Bind(&end);
212 } else {
213 Register in = locations->InAt(0).AsRegister<Register>();
214 __ rbit(out, in);
215 __ clz(out, out);
216 }
217}
218
219void IntrinsicLocationsBuilderARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
220 LocationSummary* locations = new (arena_) LocationSummary(invoke,
221 LocationSummary::kNoCall,
222 kIntrinsified);
223 locations->SetInAt(0, Location::RequiresRegister());
224 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
225}
226
227void IntrinsicCodeGeneratorARM::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
228 GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
229}
230
231void IntrinsicLocationsBuilderARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
232 LocationSummary* locations = new (arena_) LocationSummary(invoke,
233 LocationSummary::kNoCall,
234 kIntrinsified);
235 locations->SetInAt(0, Location::RequiresRegister());
236 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
237}
238
239void IntrinsicCodeGeneratorARM::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
240 GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
241}
242
243static void GenIntegerRotate(LocationSummary* locations,
244 ArmAssembler* assembler,
245 bool is_left) {
246 Register in = locations->InAt(0).AsRegister<Register>();
247 Location rhs = locations->InAt(1);
248 Register out = locations->Out().AsRegister<Register>();
249
250 if (rhs.IsConstant()) {
251 // Arm32 and Thumb2 assemblers require a rotation on the interval [1,31],
252 // so map all rotations to a +ve. equivalent in that range.
253 // (e.g. left *or* right by -2 bits == 30 bits in the same direction.)
254 uint32_t rot = rhs.GetConstant()->AsIntConstant()->GetValue() & 0x1F;
255 if (rot) {
256 // Rotate, mapping left rotations to right equivalents if necessary.
257 // (e.g. left by 2 bits == right by 30.)
258 __ Ror(out, in, is_left ? (0x20 - rot) : rot);
259 } else if (out != in) {
260 __ Mov(out, in);
261 }
262 } else {
263 if (is_left) {
264 __ rsb(out, rhs.AsRegister<Register>(), ShifterOperand(0));
265 __ Ror(out, in, out);
266 } else {
267 __ Ror(out, in, rhs.AsRegister<Register>());
268 }
269 }
270}
271
272// Gain some speed by mapping all Long rotates onto equivalent pairs of Integer
273// rotates by swapping input regs (effectively rotating by the first 32-bits of
274// a larger rotation) or flipping direction (thus treating larger right/left
275// rotations as sub-word sized rotations in the other direction) as appropriate.
276static void GenLongRotate(LocationSummary* locations,
277 ArmAssembler* assembler,
278 bool is_left) {
279 Register in_reg_lo = locations->InAt(0).AsRegisterPairLow<Register>();
280 Register in_reg_hi = locations->InAt(0).AsRegisterPairHigh<Register>();
281 Location rhs = locations->InAt(1);
282 Register out_reg_lo = locations->Out().AsRegisterPairLow<Register>();
283 Register out_reg_hi = locations->Out().AsRegisterPairHigh<Register>();
284
285 if (rhs.IsConstant()) {
286 uint32_t rot = rhs.GetConstant()->AsIntConstant()->GetValue();
287 // Map all left rotations to right equivalents.
288 if (is_left) {
289 rot = 0x40 - rot;
290 }
291 // Map all rotations to +ve. equivalents on the interval [0,63].
292 rot &= 0x3F;
293 // For rotates over a word in size, 'pre-rotate' by 32-bits to keep rotate
294 // logic below to a simple pair of binary orr.
295 // (e.g. 34 bits == in_reg swap + 2 bits right.)
296 if (rot >= 0x20) {
297 rot -= 0x20;
298 std::swap(in_reg_hi, in_reg_lo);
299 }
300 // Rotate, or mov to out for zero or word size rotations.
301 if (rot) {
302 __ Lsr(out_reg_hi, in_reg_hi, rot);
303 __ orr(out_reg_hi, out_reg_hi, ShifterOperand(in_reg_lo, arm::LSL, 0x20 - rot));
304 __ Lsr(out_reg_lo, in_reg_lo, rot);
305 __ orr(out_reg_lo, out_reg_lo, ShifterOperand(in_reg_hi, arm::LSL, 0x20 - rot));
306 } else {
307 __ Mov(out_reg_lo, in_reg_lo);
308 __ Mov(out_reg_hi, in_reg_hi);
309 }
310 } else {
311 Register shift_left = locations->GetTemp(0).AsRegister<Register>();
312 Register shift_right = locations->GetTemp(1).AsRegister<Register>();
313 Label end;
314 Label right;
315
316 __ and_(shift_left, rhs.AsRegister<Register>(), ShifterOperand(0x1F));
317 __ Lsrs(shift_right, rhs.AsRegister<Register>(), 6);
318 __ rsb(shift_right, shift_left, ShifterOperand(0x20), AL, kCcKeep);
319
320 if (is_left) {
321 __ b(&right, CS);
322 } else {
323 __ b(&right, CC);
324 std::swap(shift_left, shift_right);
325 }
326
327 // out_reg_hi = (reg_hi << shift_left) | (reg_lo >> shift_right).
328 // out_reg_lo = (reg_lo << shift_left) | (reg_hi >> shift_right).
329 __ Lsl(out_reg_hi, in_reg_hi, shift_left);
330 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
331 __ add(out_reg_hi, out_reg_hi, ShifterOperand(out_reg_lo));
332 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
333 __ Lsr(shift_left, in_reg_hi, shift_right);
334 __ add(out_reg_lo, out_reg_lo, ShifterOperand(shift_left));
335 __ b(&end);
336
337 // out_reg_hi = (reg_hi >> shift_right) | (reg_lo << shift_left).
338 // out_reg_lo = (reg_lo >> shift_right) | (reg_hi << shift_left).
339 __ Bind(&right);
340 __ Lsr(out_reg_hi, in_reg_hi, shift_right);
341 __ Lsl(out_reg_lo, in_reg_lo, shift_left);
342 __ add(out_reg_hi, out_reg_hi, ShifterOperand(out_reg_lo));
343 __ Lsr(out_reg_lo, in_reg_lo, shift_right);
344 __ Lsl(shift_right, in_reg_hi, shift_left);
345 __ add(out_reg_lo, out_reg_lo, ShifterOperand(shift_right));
346
347 __ Bind(&end);
348 }
349}
350
351void IntrinsicLocationsBuilderARM::VisitIntegerRotateRight(HInvoke* invoke) {
352 LocationSummary* locations = new (arena_) LocationSummary(invoke,
353 LocationSummary::kNoCall,
354 kIntrinsified);
355 locations->SetInAt(0, Location::RequiresRegister());
356 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
357 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
358}
359
360void IntrinsicCodeGeneratorARM::VisitIntegerRotateRight(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000361 GenIntegerRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ false);
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100362}
363
364void IntrinsicLocationsBuilderARM::VisitLongRotateRight(HInvoke* invoke) {
365 LocationSummary* locations = new (arena_) LocationSummary(invoke,
366 LocationSummary::kNoCall,
367 kIntrinsified);
368 locations->SetInAt(0, Location::RequiresRegister());
369 if (invoke->InputAt(1)->IsConstant()) {
370 locations->SetInAt(1, Location::ConstantLocation(invoke->InputAt(1)->AsConstant()));
371 } else {
372 locations->SetInAt(1, Location::RequiresRegister());
373 locations->AddTemp(Location::RequiresRegister());
374 locations->AddTemp(Location::RequiresRegister());
375 }
376 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
377}
378
379void IntrinsicCodeGeneratorARM::VisitLongRotateRight(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000380 GenLongRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ false);
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100381}
382
383void IntrinsicLocationsBuilderARM::VisitIntegerRotateLeft(HInvoke* invoke) {
384 LocationSummary* locations = new (arena_) LocationSummary(invoke,
385 LocationSummary::kNoCall,
386 kIntrinsified);
387 locations->SetInAt(0, Location::RequiresRegister());
388 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
389 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
390}
391
392void IntrinsicCodeGeneratorARM::VisitIntegerRotateLeft(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000393 GenIntegerRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ true);
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100394}
395
396void IntrinsicLocationsBuilderARM::VisitLongRotateLeft(HInvoke* invoke) {
397 LocationSummary* locations = new (arena_) LocationSummary(invoke,
398 LocationSummary::kNoCall,
399 kIntrinsified);
400 locations->SetInAt(0, Location::RequiresRegister());
401 if (invoke->InputAt(1)->IsConstant()) {
402 locations->SetInAt(1, Location::ConstantLocation(invoke->InputAt(1)->AsConstant()));
403 } else {
404 locations->SetInAt(1, Location::RequiresRegister());
405 locations->AddTemp(Location::RequiresRegister());
406 locations->AddTemp(Location::RequiresRegister());
407 }
408 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
409}
410
411void IntrinsicCodeGeneratorARM::VisitLongRotateLeft(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000412 GenLongRotate(invoke->GetLocations(), GetAssembler(), /* is_left */ true);
Scott Wakeling9ee23f42015-07-23 10:44:35 +0100413}
414
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800415static void MathAbsFP(LocationSummary* locations, bool is64bit, ArmAssembler* assembler) {
416 Location in = locations->InAt(0);
417 Location out = locations->Out();
418
419 if (is64bit) {
420 __ vabsd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
421 FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
422 } else {
423 __ vabss(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
424 }
425}
426
427void IntrinsicLocationsBuilderARM::VisitMathAbsDouble(HInvoke* invoke) {
428 CreateFPToFPLocations(arena_, invoke);
429}
430
431void IntrinsicCodeGeneratorARM::VisitMathAbsDouble(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000432 MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800433}
434
435void IntrinsicLocationsBuilderARM::VisitMathAbsFloat(HInvoke* invoke) {
436 CreateFPToFPLocations(arena_, invoke);
437}
438
439void IntrinsicCodeGeneratorARM::VisitMathAbsFloat(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000440 MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800441}
442
443static void CreateIntToIntPlusTemp(ArenaAllocator* arena, HInvoke* invoke) {
444 LocationSummary* locations = new (arena) LocationSummary(invoke,
445 LocationSummary::kNoCall,
446 kIntrinsified);
447 locations->SetInAt(0, Location::RequiresRegister());
448 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
449
450 locations->AddTemp(Location::RequiresRegister());
451}
452
453static void GenAbsInteger(LocationSummary* locations,
454 bool is64bit,
455 ArmAssembler* assembler) {
456 Location in = locations->InAt(0);
457 Location output = locations->Out();
458
459 Register mask = locations->GetTemp(0).AsRegister<Register>();
460
461 if (is64bit) {
462 Register in_reg_lo = in.AsRegisterPairLow<Register>();
463 Register in_reg_hi = in.AsRegisterPairHigh<Register>();
464 Register out_reg_lo = output.AsRegisterPairLow<Register>();
465 Register out_reg_hi = output.AsRegisterPairHigh<Register>();
466
467 DCHECK_NE(out_reg_lo, in_reg_hi) << "Diagonal overlap unexpected.";
468
469 __ Asr(mask, in_reg_hi, 31);
470 __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask));
471 __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask));
472 __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo));
473 __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi));
474 } else {
475 Register in_reg = in.AsRegister<Register>();
476 Register out_reg = output.AsRegister<Register>();
477
478 __ Asr(mask, in_reg, 31);
479 __ add(out_reg, in_reg, ShifterOperand(mask));
480 __ eor(out_reg, mask, ShifterOperand(out_reg));
481 }
482}
483
484void IntrinsicLocationsBuilderARM::VisitMathAbsInt(HInvoke* invoke) {
485 CreateIntToIntPlusTemp(arena_, invoke);
486}
487
488void IntrinsicCodeGeneratorARM::VisitMathAbsInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000489 GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800490}
491
492
493void IntrinsicLocationsBuilderARM::VisitMathAbsLong(HInvoke* invoke) {
494 CreateIntToIntPlusTemp(arena_, invoke);
495}
496
497void IntrinsicCodeGeneratorARM::VisitMathAbsLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000498 GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800499}
500
501static void GenMinMax(LocationSummary* locations,
502 bool is_min,
503 ArmAssembler* assembler) {
504 Register op1 = locations->InAt(0).AsRegister<Register>();
505 Register op2 = locations->InAt(1).AsRegister<Register>();
506 Register out = locations->Out().AsRegister<Register>();
507
508 __ cmp(op1, ShifterOperand(op2));
509
510 __ it((is_min) ? Condition::LT : Condition::GT, kItElse);
511 __ mov(out, ShifterOperand(op1), is_min ? Condition::LT : Condition::GT);
512 __ mov(out, ShifterOperand(op2), is_min ? Condition::GE : Condition::LE);
513}
514
515static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
516 LocationSummary* locations = new (arena) LocationSummary(invoke,
517 LocationSummary::kNoCall,
518 kIntrinsified);
519 locations->SetInAt(0, Location::RequiresRegister());
520 locations->SetInAt(1, Location::RequiresRegister());
521 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
522}
523
524void IntrinsicLocationsBuilderARM::VisitMathMinIntInt(HInvoke* invoke) {
525 CreateIntIntToIntLocations(arena_, invoke);
526}
527
528void IntrinsicCodeGeneratorARM::VisitMathMinIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000529 GenMinMax(invoke->GetLocations(), /* is_min */ true, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800530}
531
532void IntrinsicLocationsBuilderARM::VisitMathMaxIntInt(HInvoke* invoke) {
533 CreateIntIntToIntLocations(arena_, invoke);
534}
535
536void IntrinsicCodeGeneratorARM::VisitMathMaxIntInt(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +0000537 GenMinMax(invoke->GetLocations(), /* is_min */ false, GetAssembler());
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800538}
539
540void IntrinsicLocationsBuilderARM::VisitMathSqrt(HInvoke* invoke) {
541 CreateFPToFPLocations(arena_, invoke);
542}
543
544void IntrinsicCodeGeneratorARM::VisitMathSqrt(HInvoke* invoke) {
545 LocationSummary* locations = invoke->GetLocations();
546 ArmAssembler* assembler = GetAssembler();
547 __ vsqrtd(FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()),
548 FromLowSToD(locations->InAt(0).AsFpuRegisterPairLow<SRegister>()));
549}
550
551void IntrinsicLocationsBuilderARM::VisitMemoryPeekByte(HInvoke* invoke) {
552 CreateIntToIntLocations(arena_, invoke);
553}
554
555void IntrinsicCodeGeneratorARM::VisitMemoryPeekByte(HInvoke* invoke) {
556 ArmAssembler* assembler = GetAssembler();
557 // Ignore upper 4B of long address.
558 __ ldrsb(invoke->GetLocations()->Out().AsRegister<Register>(),
559 Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
560}
561
562void IntrinsicLocationsBuilderARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
563 CreateIntToIntLocations(arena_, invoke);
564}
565
566void IntrinsicCodeGeneratorARM::VisitMemoryPeekIntNative(HInvoke* invoke) {
567 ArmAssembler* assembler = GetAssembler();
568 // Ignore upper 4B of long address.
569 __ ldr(invoke->GetLocations()->Out().AsRegister<Register>(),
570 Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
571}
572
573void IntrinsicLocationsBuilderARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
574 CreateIntToIntLocations(arena_, invoke);
575}
576
577void IntrinsicCodeGeneratorARM::VisitMemoryPeekLongNative(HInvoke* invoke) {
578 ArmAssembler* assembler = GetAssembler();
579 // Ignore upper 4B of long address.
580 Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
581 // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
582 // exception. So we can't use ldrd as addr may be unaligned.
583 Register lo = invoke->GetLocations()->Out().AsRegisterPairLow<Register>();
584 Register hi = invoke->GetLocations()->Out().AsRegisterPairHigh<Register>();
585 if (addr == lo) {
586 __ ldr(hi, Address(addr, 4));
587 __ ldr(lo, Address(addr, 0));
588 } else {
589 __ ldr(lo, Address(addr, 0));
590 __ ldr(hi, Address(addr, 4));
591 }
592}
593
594void IntrinsicLocationsBuilderARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
595 CreateIntToIntLocations(arena_, invoke);
596}
597
598void IntrinsicCodeGeneratorARM::VisitMemoryPeekShortNative(HInvoke* invoke) {
599 ArmAssembler* assembler = GetAssembler();
600 // Ignore upper 4B of long address.
601 __ ldrsh(invoke->GetLocations()->Out().AsRegister<Register>(),
602 Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
603}
604
605static void CreateIntIntToVoidLocations(ArenaAllocator* arena, HInvoke* invoke) {
606 LocationSummary* locations = new (arena) LocationSummary(invoke,
607 LocationSummary::kNoCall,
608 kIntrinsified);
609 locations->SetInAt(0, Location::RequiresRegister());
610 locations->SetInAt(1, Location::RequiresRegister());
611}
612
613void IntrinsicLocationsBuilderARM::VisitMemoryPokeByte(HInvoke* invoke) {
614 CreateIntIntToVoidLocations(arena_, invoke);
615}
616
617void IntrinsicCodeGeneratorARM::VisitMemoryPokeByte(HInvoke* invoke) {
618 ArmAssembler* assembler = GetAssembler();
619 __ strb(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
620 Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
621}
622
623void IntrinsicLocationsBuilderARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
624 CreateIntIntToVoidLocations(arena_, invoke);
625}
626
627void IntrinsicCodeGeneratorARM::VisitMemoryPokeIntNative(HInvoke* invoke) {
628 ArmAssembler* assembler = GetAssembler();
629 __ str(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
630 Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
631}
632
633void IntrinsicLocationsBuilderARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
634 CreateIntIntToVoidLocations(arena_, invoke);
635}
636
637void IntrinsicCodeGeneratorARM::VisitMemoryPokeLongNative(HInvoke* invoke) {
638 ArmAssembler* assembler = GetAssembler();
639 // Ignore upper 4B of long address.
640 Register addr = invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>();
641 // Worst case: Control register bit SCTLR.A = 0. Then unaligned accesses throw a processor
642 // exception. So we can't use ldrd as addr may be unaligned.
643 __ str(invoke->GetLocations()->InAt(1).AsRegisterPairLow<Register>(), Address(addr, 0));
644 __ str(invoke->GetLocations()->InAt(1).AsRegisterPairHigh<Register>(), Address(addr, 4));
645}
646
647void IntrinsicLocationsBuilderARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
648 CreateIntIntToVoidLocations(arena_, invoke);
649}
650
651void IntrinsicCodeGeneratorARM::VisitMemoryPokeShortNative(HInvoke* invoke) {
652 ArmAssembler* assembler = GetAssembler();
653 __ strh(invoke->GetLocations()->InAt(1).AsRegister<Register>(),
654 Address(invoke->GetLocations()->InAt(0).AsRegisterPairLow<Register>()));
655}
656
657void IntrinsicLocationsBuilderARM::VisitThreadCurrentThread(HInvoke* invoke) {
658 LocationSummary* locations = new (arena_) LocationSummary(invoke,
659 LocationSummary::kNoCall,
660 kIntrinsified);
661 locations->SetOut(Location::RequiresRegister());
662}
663
664void IntrinsicCodeGeneratorARM::VisitThreadCurrentThread(HInvoke* invoke) {
665 ArmAssembler* assembler = GetAssembler();
666 __ LoadFromOffset(kLoadWord,
667 invoke->GetLocations()->Out().AsRegister<Register>(),
668 TR,
669 Thread::PeerOffset<kArmPointerSize>().Int32Value());
670}
671
672static void GenUnsafeGet(HInvoke* invoke,
673 Primitive::Type type,
674 bool is_volatile,
675 CodeGeneratorARM* codegen) {
676 LocationSummary* locations = invoke->GetLocations();
677 DCHECK((type == Primitive::kPrimInt) ||
678 (type == Primitive::kPrimLong) ||
679 (type == Primitive::kPrimNot));
680 ArmAssembler* assembler = codegen->GetAssembler();
Roland Levillain3b359c72015-11-17 19:35:12 +0000681 Location base_loc = locations->InAt(1);
682 Register base = base_loc.AsRegister<Register>(); // Object pointer.
683 Location offset_loc = locations->InAt(2);
684 Register offset = offset_loc.AsRegisterPairLow<Register>(); // Long offset, lo part only.
685 Location trg_loc = locations->Out();
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800686
687 if (type == Primitive::kPrimLong) {
Roland Levillain3b359c72015-11-17 19:35:12 +0000688 Register trg_lo = trg_loc.AsRegisterPairLow<Register>();
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800689 __ add(IP, base, ShifterOperand(offset));
690 if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
Roland Levillain3b359c72015-11-17 19:35:12 +0000691 Register trg_hi = trg_loc.AsRegisterPairHigh<Register>();
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800692 __ ldrexd(trg_lo, trg_hi, IP);
693 } else {
694 __ ldrd(trg_lo, Address(IP));
695 }
696 } else {
Roland Levillain3b359c72015-11-17 19:35:12 +0000697 Register trg = trg_loc.AsRegister<Register>();
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800698 __ ldr(trg, Address(base, offset));
699 }
700
701 if (is_volatile) {
702 __ dmb(ISH);
703 }
Roland Levillain4d027112015-07-01 15:41:14 +0100704
705 if (type == Primitive::kPrimNot) {
Roland Levillain3b359c72015-11-17 19:35:12 +0000706 codegen->MaybeGenerateReadBarrier(invoke, trg_loc, trg_loc, base_loc, 0U, offset_loc);
Roland Levillain4d027112015-07-01 15:41:14 +0100707 }
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800708}
709
710static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
Roland Levillain3b359c72015-11-17 19:35:12 +0000711 bool can_call = kEmitCompilerReadBarrier &&
712 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
713 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800714 LocationSummary* locations = new (arena) LocationSummary(invoke,
Roland Levillain3b359c72015-11-17 19:35:12 +0000715 can_call ?
716 LocationSummary::kCallOnSlowPath :
717 LocationSummary::kNoCall,
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -0800718 kIntrinsified);
719 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
720 locations->SetInAt(1, Location::RequiresRegister());
721 locations->SetInAt(2, Location::RequiresRegister());
722 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
723}
724
// Location builders for the Unsafe.get* family. All six variants share the
// same (object, offset) -> register shape built by CreateIntIntIntToIntLocations.
void IntrinsicLocationsBuilderARM::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke);
}
743
// Code generators for the Unsafe.get* family: each delegates to GenUnsafeGet,
// varying only the primitive type and the volatility flag.
void IntrinsicCodeGeneratorARM::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
762
763static void CreateIntIntIntIntToVoid(ArenaAllocator* arena,
764 const ArmInstructionSetFeatures& features,
765 Primitive::Type type,
766 bool is_volatile,
767 HInvoke* invoke) {
768 LocationSummary* locations = new (arena) LocationSummary(invoke,
769 LocationSummary::kNoCall,
770 kIntrinsified);
771 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
772 locations->SetInAt(1, Location::RequiresRegister());
773 locations->SetInAt(2, Location::RequiresRegister());
774 locations->SetInAt(3, Location::RequiresRegister());
775
776 if (type == Primitive::kPrimLong) {
777 // Potentially need temps for ldrexd-strexd loop.
778 if (is_volatile && !features.HasAtomicLdrdAndStrd()) {
779 locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
780 locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
781 }
782 } else if (type == Primitive::kPrimNot) {
783 // Temps for card-marking.
784 locations->AddTemp(Location::RequiresRegister()); // Temp.
785 locations->AddTemp(Location::RequiresRegister()); // Card.
786 }
787}
788
// Location builders for the Unsafe.put* family: all delegate to
// CreateIntIntIntIntToVoid, varying only the type and the volatility flag
// (ordered puts use the non-volatile location shape).
void IntrinsicLocationsBuilderARM::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimInt, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(arena_, features_, Primitive::kPrimNot, /* is_volatile */ true, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ false, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoid(
      arena_, features_, Primitive::kPrimLong, /* is_volatile */ true, invoke);
}
819
// Emits the body of an Unsafe.put* intrinsic: stores the value at
// `base + offset`. Volatile and ordered stores are preceded by a dmb;
// volatile stores are additionally followed by one. 64-bit volatile stores
// on cores without single-copy-atomic ldrd/strd fall back to an
// ldrexd/strexd retry loop. Reference stores mark the GC card and, when heap
// poisoning is enabled, store a poisoned copy of the reference.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         bool is_ordered,
                         CodeGeneratorARM* codegen) {
  ArmAssembler* assembler = codegen->GetAssembler();

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Long offset, lo part only.
  Register value;

  // Release-style barrier before the store (volatile and ordered puts).
  if (is_volatile || is_ordered) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimLong) {
    Register value_lo = locations->InAt(3).AsRegisterPairLow<Register>();
    value = value_lo;
    if (is_volatile && !codegen->GetInstructionSetFeatures().HasAtomicLdrdAndStrd()) {
      Register temp_lo = locations->GetTemp(0).AsRegister<Register>();
      Register temp_hi = locations->GetTemp(1).AsRegister<Register>();
      Register value_hi = locations->InAt(3).AsRegisterPairHigh<Register>();

      // Exclusive load/store loop; strexd writes 0 into temp_lo on success,
      // so retry while it is non-zero.
      __ add(IP, base, ShifterOperand(offset));
      Label loop_head;
      __ Bind(&loop_head);
      __ ldrexd(temp_lo, temp_hi, IP);
      __ strexd(temp_lo, value_lo, value_hi, IP);
      __ cmp(temp_lo, ShifterOperand(0));
      __ b(&loop_head, NE);
    } else {
      // Plain 64-bit store; strd is atomic enough here (non-volatile, or the
      // core advertises atomic ldrd/strd).
      __ add(IP, base, ShifterOperand(offset));
      __ strd(value_lo, Address(IP));
    }
  } else {
    value = locations->InAt(3).AsRegister<Register>();
    Register source = value;
    if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
      // Poison a copy so `value` stays usable for the card mark below.
      Register temp = locations->GetTemp(0).AsRegister<Register>();
      __ Mov(temp, value);
      __ PoisonHeapReference(temp);
      source = temp;
    }
    __ str(source, Address(base, offset));
  }

  // Trailing barrier for volatile stores only.
  if (is_volatile) {
    __ dmb(ISH);
  }

  if (type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    Register card = locations->GetTemp(1).AsRegister<Register>();
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(temp, card, base, value, value_can_be_null);
  }
}
877
// Code generators for the Unsafe.put* family: all delegate to GenUnsafePut,
// varying only the primitive type and the volatile/ordered flags.
void IntrinsicCodeGeneratorARM::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimInt,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimNot,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ false,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ false,
               /* is_ordered */ true,
               codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(),
               Primitive::kPrimLong,
               /* is_volatile */ true,
               /* is_ordered */ false,
               codegen_);
}
941
942static void CreateIntIntIntIntIntToIntPlusTemps(ArenaAllocator* arena,
943 HInvoke* invoke) {
944 LocationSummary* locations = new (arena) LocationSummary(invoke,
945 LocationSummary::kNoCall,
946 kIntrinsified);
947 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
948 locations->SetInAt(1, Location::RequiresRegister());
949 locations->SetInAt(2, Location::RequiresRegister());
950 locations->SetInAt(3, Location::RequiresRegister());
951 locations->SetInAt(4, Location::RequiresRegister());
952
953 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
954
955 locations->AddTemp(Location::RequiresRegister()); // Pointer.
956 locations->AddTemp(Location::RequiresRegister()); // Temp 1.
957 locations->AddTemp(Location::RequiresRegister()); // Temp 2.
958}
959
// Emits a 32-bit (int or reference) compare-and-swap as an ldrex/strex retry
// loop; 64-bit CAS is explicitly unsupported (see the DCHECK). Sets `out` to
// 1 on success and 0 on failure.
static void GenCas(LocationSummary* locations, Primitive::Type type, CodeGeneratorARM* codegen) {
  DCHECK_NE(type, Primitive::kPrimLong);

  ArmAssembler* assembler = codegen->GetAssembler();

  Register out = locations->Out().AsRegister<Register>();              // Boolean result.

  Register base = locations->InAt(1).AsRegister<Register>();           // Object pointer.
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();  // Offset (discard high 4B).
  Register expected_lo = locations->InAt(3).AsRegister<Register>();    // Expected.
  Register value_lo = locations->InAt(4).AsRegister<Register>();       // Value.

  Register tmp_ptr = locations->GetTemp(0).AsRegister<Register>();     // Pointer to actual memory.
  Register tmp_lo = locations->GetTemp(1).AsRegister<Register>();      // Value in memory.

  if (type == Primitive::kPrimNot) {
    // Mark card for object assuming new value is stored. Worst case we will mark an unchanged
    // object and scan the receiver at the next GC for nothing.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(tmp_ptr, tmp_lo, base, value_lo, value_can_be_null);
  }

  // Prevent reordering with prior memory operations.
  __ dmb(ISH);

  __ add(tmp_ptr, base, ShifterOperand(offset));

  // With heap poisoning, compare/store poisoned representations directly;
  // the input registers are unpoisoned again after the loop.
  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->PoisonHeapReference(expected_lo);
    codegen->GetAssembler()->PoisonHeapReference(value_lo);
  }

  // do {
  //   tmp = [r_ptr] - expected;
  // } while (tmp == 0 && failure([r_ptr] <- r_new_value));
  // result = tmp != 0;

  Label loop_head;
  __ Bind(&loop_head);

  __ ldrex(tmp_lo, tmp_ptr);
  // TODO: Do we need a read barrier here when `type == Primitive::kPrimNot`?

  __ subs(tmp_lo, tmp_lo, ShifterOperand(expected_lo));

  // If the loaded value matched (EQ), attempt the store; strex writes 1 into
  // tmp_lo when the exclusive store fails, which re-enters the loop below.
  __ it(EQ, ItState::kItT);
  __ strex(tmp_lo, value_lo, tmp_ptr, EQ);
  __ cmp(tmp_lo, ShifterOperand(1), EQ);

  __ b(&loop_head, EQ);

  __ dmb(ISH);

  // out = (tmp_lo == 0) ? 1 : 0, using reverse-subtract and the carry flag.
  __ rsbs(out, tmp_lo, ShifterOperand(1));
  __ it(CC);
  __ mov(out, ShifterOperand(0), CC);

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    codegen->GetAssembler()->UnpoisonHeapReference(value_lo);
    codegen->GetAssembler()->UnpoisonHeapReference(expected_lo);
  }
}
1022
// Unsafe compareAndSwapInt/Object: locations plus delegation to GenCas.
void IntrinsicLocationsBuilderARM::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicLocationsBuilderARM::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic does not always work when heap
  // poisoning is enabled (it breaks run-test 004-UnsafeTest); turn it
  // off temporarily as a quick fix.
  //
  // TODO(rpl): Fix it and turn it back on.
  //
  // TODO(rpl): Also, we should investigate whether we need a read
  // barrier in the generated code.
  //
  // Returning without building a LocationSummary presumably leaves this
  // invoke un-intrinsified (cf. the nullptr check in TryDispatch).
  if (kPoisonHeapReferences) {
    return;
  }

  CreateIntIntIntIntIntToIntPlusTemps(arena_, invoke);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimInt, codegen_);
}
void IntrinsicCodeGeneratorARM::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCas(invoke->GetLocations(), Primitive::kPrimNot, codegen_);
}
1047
1048void IntrinsicLocationsBuilderARM::VisitStringCharAt(HInvoke* invoke) {
1049 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1050 LocationSummary::kCallOnSlowPath,
1051 kIntrinsified);
1052 locations->SetInAt(0, Location::RequiresRegister());
1053 locations->SetInAt(1, Location::RequiresRegister());
1054 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1055
1056 locations->AddTemp(Location::RequiresRegister());
1057 locations->AddTemp(Location::RequiresRegister());
1058}
1059
// Inline String.charAt: bounds-check the index against the string length
// (slow path on failure) and load the 16-bit character from the value array.
void IntrinsicCodeGeneratorARM::VisitStringCharAt(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array
  const MemberOffset value_offset = mirror::String::ValueOffset();
  // Location of count
  const MemberOffset count_offset = mirror::String::CountOffset();

  Register obj = locations->InAt(0).AsRegister<Register>();  // String object pointer.
  Register idx = locations->InAt(1).AsRegister<Register>();  // Index of character.
  Register out = locations->Out().AsRegister<Register>();    // Result character.

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register array_temp = locations->GetTemp(1).AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  // the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  // we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  __ ldr(temp, Address(obj, count_offset.Int32Value()));  // temp = str.length.
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ cmp(idx, ShifterOperand(temp));
  // Unsigned higher-or-same covers both idx >= length and negative idx.
  __ b(slow_path->GetEntryLabel(), CS);

  __ add(array_temp, obj, ShifterOperand(value_offset.Int32Value()));  // array_temp := str.value.

  // Load the value: 16-bit chars, hence the LSL #1 scaled index.
  __ ldrh(out, Address(array_temp, idx, LSL, 1));  // out := array_temp[idx].

  __ Bind(slow_path->GetExitLabel());
}
1096
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001097void IntrinsicLocationsBuilderARM::VisitStringCompareTo(HInvoke* invoke) {
1098 // The inputs plus one temp.
1099 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1100 LocationSummary::kCall,
1101 kIntrinsified);
1102 InvokeRuntimeCallingConvention calling_convention;
1103 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1104 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1105 locations->SetOut(Location::RegisterLocation(R0));
1106}
1107
// Inline String.compareTo: a null argument diverts to the slow path,
// otherwise the pStringCompareTo entrypoint is tail-called via LR.
void IntrinsicCodeGeneratorARM::VisitStringCompareTo(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Null argument -> slow path (which performs the original call and throws).
  Register argument = locations->InAt(1).AsRegister<Register>();
  __ cmp(argument, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pStringCompareTo).Int32Value());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
1126
Agi Csaki289cd552015-08-18 17:10:38 -07001127void IntrinsicLocationsBuilderARM::VisitStringEquals(HInvoke* invoke) {
1128 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1129 LocationSummary::kNoCall,
1130 kIntrinsified);
1131 InvokeRuntimeCallingConvention calling_convention;
1132 locations->SetInAt(0, Location::RequiresRegister());
1133 locations->SetInAt(1, Location::RequiresRegister());
1134 // Temporary registers to store lengths of strings and for calculations.
1135 // Using instruction cbz requires a low register, so explicitly set a temp to be R0.
1136 locations->AddTemp(Location::RegisterLocation(R0));
1137 locations->AddTemp(Location::RequiresRegister());
1138 locations->AddTemp(Location::RequiresRegister());
1139
1140 locations->SetOut(Location::RequiresRegister());
1141}
1142
// Inline String.equals: null check, exact-class check, length check and
// reference-equality shortcut, then a word-at-a-time (two chars per load)
// content comparison.
void IntrinsicCodeGeneratorARM::VisitStringEquals(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  Register temp = locations->GetTemp(0).AsRegister<Register>();
  Register temp1 = locations->GetTemp(1).AsRegister<Register>();
  Register temp2 = locations->GetTemp(2).AsRegister<Register>();

  Label loop;
  Label end;
  Label return_true;
  Label return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ CompareAndBranchIfZero(arg, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ ldr(temp, Address(str, class_offset));
  __ ldr(temp1, Address(arg, class_offset));
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);

  // Load lengths of this and argument strings.
  __ ldr(temp, Address(str, count_offset));
  __ ldr(temp1, Address(arg, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmp(temp, ShifterOperand(temp1));
  __ b(&return_false, NE);
  // Return true if both strings are empty.
  __ cbz(temp, &return_true);

  // Reference equality check, return true if same reference.
  __ cmp(str, ShifterOperand(arg));
  __ b(&return_true, EQ);

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // temp1 is the byte offset of the next pair of characters to compare.
  __ LoadImmediate(temp1, value_offset);

  // Loop to compare strings 2 characters at a time starting at the front of the string.
  // Ok to do this because strings with an odd length are zero-padded.
  __ Bind(&loop);
  __ ldr(out, Address(str, temp1));
  __ ldr(temp2, Address(arg, temp1));
  __ cmp(out, ShifterOperand(temp2));
  __ b(&return_false, NE);
  __ add(temp1, temp1, ShifterOperand(sizeof(uint32_t)));
  // temp counts remaining characters; each iteration consumes two.
  __ subs(temp, temp, ShifterOperand(sizeof(uint32_t) / sizeof(uint16_t)));
  __ b(&loop, GT);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ LoadImmediate(out, 1);
  __ b(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ LoadImmediate(out, 0);
  __ Bind(&end);
}
1221
// Shared body of String.indexOf(int) / indexOf(int, int). Code points above
// 0xFFFF (outside the BMP) are handed to the slow path; otherwise the
// hand-written pIndexOf assembly stub is invoked directly. With
// `start_at_zero` the start index (in tmp_reg == R2 per the location
// builder) is forced to 0.
static void GenerateVisitStringIndexOf(HInvoke* invoke,
                                       ArmAssembler* assembler,
                                       CodeGeneratorARM* codegen,
                                       ArenaAllocator* allocator,
                                       bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();
  Register tmp_reg = locations->GetTemp(0).AsRegister<Register>();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCode* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
      codegen->AddSlowPath(slow_path);
      __ b(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    // Runtime check: branch to the slow path if the char is above 0xFFFF.
    Register char_reg = locations->InAt(1).AsRegister<Register>();
    __ LoadImmediate(tmp_reg, std::numeric_limits<uint16_t>::max());
    __ cmp(char_reg, ShifterOperand(tmp_reg));
    slow_path = new (allocator) IntrinsicSlowPathARM(invoke);
    codegen->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), HI);
  }

  if (start_at_zero) {
    DCHECK_EQ(tmp_reg, R2);
    // Start-index = 0.
    __ LoadImmediate(tmp_reg, 0);
  }

  __ LoadFromOffset(kLoadWord, LR, TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pIndexOf).Int32Value());
  __ blx(LR);

  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
1270
// String.indexOf(int): inputs aligned to the runtime calling convention so
// the hand-crafted pIndexOf stub can be called directly; R2 is reserved as a
// temp so the implicit start index 0 can be materialized there.
void IntrinsicLocationsBuilderARM::VisitStringIndexOf(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare, and need to send start-index=0.
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOf(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

// String.indexOf(int, int): same shape, but the caller-supplied start index
// occupies the third calling-convention register, so the slow-path temp is
// an ordinary register.
void IntrinsicLocationsBuilderARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  // We have a hand-crafted assembly stub that follows the runtime calling convention. So it's
  // best to align the inputs accordingly.
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));

  // Need a temp for slow-path codepoint compare.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorARM::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateVisitStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}
1311
Jeff Hao848f70a2014-01-15 13:49:50 -08001312void IntrinsicLocationsBuilderARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
1313 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1314 LocationSummary::kCall,
1315 kIntrinsified);
1316 InvokeRuntimeCallingConvention calling_convention;
1317 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1318 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1319 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1320 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1321 locations->SetOut(Location::RegisterLocation(R0));
1322}
1323
// String-from-bytes: null byte array goes to the slow path, otherwise the
// pAllocStringFromBytes entrypoint is called.
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromBytes(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ cmp(byte_array, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromBytes).Int32Value());
  // NOTE(review): the PC is recorded before the blx here, whereas
  // CodeGeneratorARM::InvokeRuntime records it after the call — confirm this
  // is the intended stack-map position.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
1340
1341void IntrinsicLocationsBuilderARM::VisitStringNewStringFromChars(HInvoke* invoke) {
1342 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1343 LocationSummary::kCall,
1344 kIntrinsified);
1345 InvokeRuntimeCallingConvention calling_convention;
1346 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1347 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1348 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1349 locations->SetOut(Location::RegisterLocation(R0));
1350}
1351
// String-from-chars: no null check needed here, just call the
// pAllocStringFromChars entrypoint.
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromChars(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();

  __ LoadFromOffset(
      kLoadWord, LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromChars).Int32Value());
  // NOTE(review): PC recorded before the blx (InvokeRuntime records after) —
  // confirm the stack-map position is intended.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
}
1360
1361void IntrinsicLocationsBuilderARM::VisitStringNewStringFromString(HInvoke* invoke) {
1362 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1363 LocationSummary::kCall,
1364 kIntrinsified);
1365 InvokeRuntimeCallingConvention calling_convention;
1366 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1367 locations->SetOut(Location::RegisterLocation(R0));
1368}
1369
// String-from-string: a null source string goes to the slow path, otherwise
// the pAllocStringFromString entrypoint is called.
void IntrinsicCodeGeneratorARM::VisitStringNewStringFromString(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ cmp(string_to_copy, ShifterOperand(0));
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);
  __ b(slow_path->GetEntryLabel(), EQ);

  __ LoadFromOffset(kLoadWord,
      LR, TR, QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocStringFromString).Int32Value());
  // NOTE(review): PC recorded before the blx (InvokeRuntime records after) —
  // confirm the stack-map position is intended.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ blx(LR);
  __ Bind(slow_path->GetExitLabel());
}
1386
Nicolas Geoffray5bd05a52015-10-13 09:48:30 +01001387void IntrinsicLocationsBuilderARM::VisitSystemArrayCopy(HInvoke* invoke) {
1388 CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
1389 LocationSummary* locations = invoke->GetLocations();
1390 if (locations == nullptr) {
1391 return;
1392 }
1393
1394 HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
1395 HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
1396 HIntConstant* length = invoke->InputAt(4)->AsIntConstant();
1397
1398 if (src_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(src_pos->GetValue())) {
1399 locations->SetInAt(1, Location::RequiresRegister());
1400 }
1401 if (dest_pos != nullptr && !assembler_->ShifterOperandCanAlwaysHold(dest_pos->GetValue())) {
1402 locations->SetInAt(3, Location::RequiresRegister());
1403 }
1404 if (length != nullptr && !assembler_->ShifterOperandCanAlwaysHold(length->GetValue())) {
1405 locations->SetInAt(4, Location::RequiresRegister());
1406 }
1407}
1408
// Emits bounds checks for one side (source or destination) of an array copy:
// verifies that 0 <= pos <= length(input) and that length(input) - pos >=
// length, branching to `slow_path` on any failure.
//
// `input_len` and `temp` are scratch registers. When `length_is_input_length`
// is true, the caller knows `length` equals length(input), so the only way
// the copy can succeed is pos == 0 and the remaining checks are elided.
static void CheckPosition(ArmAssembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        __ LoadFromOffset(kLoadWord, temp, input, length_offset);
        if (length.IsConstant()) {
          __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
        }
        __ b(slow_path->GetEntryLabel(), LT);
      }
    } else {
      // Check that length(input) >= pos.
      __ LoadFromOffset(kLoadWord, input_len, input, length_offset);
      // subs leaves length(input) - pos in `temp` and sets the flags for the
      // signed-LT branch below.
      __ subs(temp, input_len, ShifterOperand(pos_const));
      __ b(slow_path->GetEntryLabel(), LT);

      // Check that (length(input) - pos) >= length; `temp` still holds the
      // difference computed above.
      if (length.IsConstant()) {
        __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
      }
      __ b(slow_path->GetEntryLabel(), LT);
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ CompareAndBranchIfNonZero(pos_reg, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ cmp(pos_reg, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that pos <= length(input).
    __ LoadFromOffset(kLoadWord, temp, input, length_offset);
    // subs leaves length(input) - pos in `temp` for the next comparison.
    __ subs(temp, temp, ShifterOperand(pos_reg));
    __ b(slow_path->GetEntryLabel(), LT);

    // Check that (length(input) - pos) >= length.
    if (length.IsConstant()) {
      __ cmp(temp, ShifterOperand(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmp(temp, ShifterOperand(length.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);
  }
}
1471
// TODO: Implement read barriers in the SystemArrayCopy intrinsic.
// Note that this code path is not used (yet) because we do not
// intrinsify methods that can go into the IntrinsicSlowPathARM
// slow path.
void IntrinsicCodeGeneratorARM::VisitSystemArrayCopy(HInvoke* invoke) {
  ArmAssembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Byte offsets of the object/class fields inspected by the type checks below.
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();

  // Inputs are (src, srcPos, dest, destPos, length); the positions and length
  // may be constants or registers (see the locations builder).
  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length = locations->InAt(4);
  Register temp1 = locations->GetTemp(0).AsRegister<Register>();
  Register temp2 = locations->GetTemp(1).AsRegister<Register>();
  Register temp3 = locations->GetTemp(2).AsRegister<Register>();

  // Every failed check below falls back to this slow path.
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
  codegen_->AddSlowPath(slow_path);

  Label ok;
  SystemArrayCopyOptimizations optimizations(invoke);

  // Compare src and dest up front when the overlap check below will need the
  // flags; they are consumed by the conditional branches to `ok`.
  if (!optimizations.GetDestinationIsSource()) {
    if (!src_pos.IsConstant() || !dest_pos.IsConstant()) {
      __ cmp(src, ShifterOperand(dest));
    }
  }

  // If source and destination are the same, we go to slow path if we need to do
  // forward copying: with dest_pos > src_pos the regions may overlap and the
  // simple ascending word copy emitted below would corrupt the data.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      // Checked when building locations.
      DCHECK(!optimizations.GetDestinationIsSource()
             || (src_pos_constant >= dest_pos.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ b(&ok, NE);  // Distinct arrays: no overlap possible.
      }
      __ cmp(dest_pos.AsRegister<Register>(), ShifterOperand(src_pos_constant));
      __ b(slow_path->GetEntryLabel(), GT);
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ b(&ok, NE);  // Distinct arrays: no overlap possible.
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos_constant));
    } else {
      __ cmp(src_pos.AsRegister<Register>(), ShifterOperand(dest_pos.AsRegister<Register>()));
    }
    __ b(slow_path->GetEntryLabel(), LT);  // src_pos < dest_pos: overlapping copy.
  }

  __ Bind(&ok);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ CompareAndBranchIfZero(src, slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null. (When dest aliases src, the source
    // check above already covers it.)
    __ CompareAndBranchIfZero(dest, slow_path->GetEntryLabel());
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ cmp(length.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), LT);
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                slow_path,
                temp1,
                temp2,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If none of these checks succeed, we go to the
    // slow path.
    __ LoadFromOffset(kLoadWord, temp1, dest, class_offset);
    __ LoadFromOffset(kLoadWord, temp2, src, class_offset);
    bool did_unpoison = false;
    if (!optimizations.GetDestinationIsNonPrimitiveArray() ||
        !optimizations.GetSourceIsNonPrimitiveArray()) {
      // One or two of the references need to be unpoisoned. Unpoison them
      // both to make the identity check valid.
      __ MaybeUnpoisonHeapReference(temp1);
      __ MaybeUnpoisonHeapReference(temp2);
      did_unpoison = true;
    }

    if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
      // Bail out if the destination is not a non-primitive array: a null
      // component type means `dest` is not an array at all.
      __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      // A non-zero primitive type marks a primitive (non-reference) array.
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      // Bail out if the source is not a non-primitive array (same test as
      // above, applied to the source class in temp2).
      __ LoadFromOffset(kLoadWord, temp3, temp2, component_offset);
      __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp3);
      __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
      static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
      __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
    }

    // Identity check: identical classes make the copy trivially type-safe.
    __ cmp(temp1, ShifterOperand(temp2));

    if (optimizations.GetDestinationIsTypedObjectArray()) {
      Label do_copy;
      __ b(&do_copy, EQ);
      if (!did_unpoison) {
        __ MaybeUnpoisonHeapReference(temp1);
      }
      // Classes differ: also accept the copy when the destination is
      // Object[], i.e. its component type's super class is null (Object).
      __ LoadFromOffset(kLoadWord, temp1, temp1, component_offset);
      __ MaybeUnpoisonHeapReference(temp1);
      __ LoadFromOffset(kLoadWord, temp1, temp1, super_offset);
      // No need to unpoison the result, we're comparing against null.
      __ CompareAndBranchIfNonZero(temp1, slow_path->GetEntryLabel());
      __ Bind(&do_copy);
    } else {
      __ b(slow_path->GetEntryLabel(), NE);
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
    __ LoadFromOffset(kLoadWord, temp1, src, class_offset);
    __ MaybeUnpoisonHeapReference(temp1);
    __ LoadFromOffset(kLoadWord, temp3, temp1, component_offset);
    __ CompareAndBranchIfZero(temp3, slow_path->GetEntryLabel());
    __ MaybeUnpoisonHeapReference(temp3);
    __ LoadFromOffset(kLoadUnsignedHalfword, temp3, temp3, primitive_offset);
    static_assert(Primitive::kPrimNot == 0, "Expected 0 for kPrimNot");
    __ CompareAndBranchIfNonZero(temp3, slow_path->GetEntryLabel());
  }

  // Compute base source address, base destination address, and end source address.

  // References are copied as 32-bit words; the LSL 2 shifts below must match.
  uint32_t element_size = sizeof(int32_t);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp1, src, element_size * constant + offset);
  } else {
    __ add(temp1, src, ShifterOperand(src_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp1, offset);
  }

  if (dest_pos.IsConstant()) {
    int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp2, dest, element_size * constant + offset);
  } else {
    __ add(temp2, dest, ShifterOperand(dest_pos.AsRegister<Register>(), LSL, 2));
    __ AddConstant(temp2, offset);
  }

  if (length.IsConstant()) {
    int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
    __ AddConstant(temp3, temp1, element_size * constant);
  } else {
    __ add(temp3, temp1, ShifterOperand(length.AsRegister<Register>(), LSL, 2));
  }

  // Iterate over the arrays and do a raw copy of the objects. We don't need to
  // poison/unpoison, nor do any read barrier as the next uses of the destination
  // array will do it.
  // temp1 = current source, temp2 = current destination, temp3 = source end.
  Label loop, done;
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&done, EQ);  // Zero-length copy: skip the loop entirely.
  __ Bind(&loop);
  __ ldr(IP, Address(temp1, element_size, Address::PostIndex));
  __ str(IP, Address(temp2, element_size, Address::PostIndex));
  __ cmp(temp1, ShifterOperand(temp3));
  __ b(&loop, NE);
  __ Bind(&done);

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* can_be_null */ false);

  __ Bind(slow_path->GetExitLabel());
}
1692
// Unimplemented intrinsics.

// Expands to empty Visit methods for both the locations builder and the code
// generator: an intrinsic whose locations builder sets up no LocationSummary
// is not intrinsified, so these calls fall back to the regular invoke path.
#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}
1700
// Bit-manipulation intrinsics.
UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
// Floating-point and long min/max plus rounding.
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMaxDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMaxFloatFloat)
UNIMPLEMENTED_INTRINSIC(MathMinLongLong)
UNIMPLEMENTED_INTRINSIC(MathMaxLongLong)
UNIMPLEMENTED_INTRINSIC(MathCeil)          // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathFloor)         // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRint)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)   // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(MathRoundFloat)    // Could be done by changing rounding mode, maybe?
// Unsafe / array / reference helpers.
UNIMPLEMENTED_INTRINSIC(UnsafeCASLong)     // High register pressure.
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
// Transcendental math functions from java.lang.Math.
UNIMPLEMENTED_INTRINSIC(MathCos)
UNIMPLEMENTED_INTRINSIC(MathSin)
UNIMPLEMENTED_INTRINSIC(MathAcos)
UNIMPLEMENTED_INTRINSIC(MathAsin)
UNIMPLEMENTED_INTRINSIC(MathAtan)
UNIMPLEMENTED_INTRINSIC(MathAtan2)
UNIMPLEMENTED_INTRINSIC(MathCbrt)
UNIMPLEMENTED_INTRINSIC(MathCosh)
UNIMPLEMENTED_INTRINSIC(MathExp)
UNIMPLEMENTED_INTRINSIC(MathExpm1)
UNIMPLEMENTED_INTRINSIC(MathHypot)
UNIMPLEMENTED_INTRINSIC(MathLog)
UNIMPLEMENTED_INTRINSIC(MathLog10)
UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
Roland Levillain4d027112015-07-01 15:41:14 +01001739#undef UNIMPLEMENTED_INTRINSIC
1740
1741#undef __
1742
Andreas Gampe2bcf9bf2015-01-29 09:56:07 -08001743} // namespace arm
1744} // namespace art