/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int kFloatNaN = 0x7FC00000;
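// Bit patterns of the canonical quiet NaNs that Java's Double.NaN (0x7FF8000000000000L)
// and Float.NaN (0x7FC00000) denote; the double constant is split into 32-bit halves
// because 32-bit x86 materializes it on the stack one word at a time.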

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathX86 slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathX86 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathX86 for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
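    // punpckldq interleaves the low 32-bit lanes: temp1 keeps the low word in bits 31:0
    // and receives the high word from temp2 into bits 63:32, reassembling the 64-bit value.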
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
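      // e.g. in = 0x00001234: bswapl gives 0x34120000, and the arithmetic shift right by
      // 16 produces 0x00003412, i.e. the byte-swapped short, sign-extended.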
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
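  // bswap64(x) swaps the two 32-bit halves and byte-reverses each, so the byte-swapped
  // high input word becomes the low output word and vice versa.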
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
//       need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // TODO: Allow x86 to work with memory. This requires assembler support, see below.
  // locations->SetInAt(0, Location::Any());  // X86 can work on memory directly.
  locations->SetOut(Location::SameAsFirstInput());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location output = locations->Out();

  if (output.IsFpuRegister()) {
    // Create the right constant on an aligned stack.
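    // The IEEE-754 sign bit is the most significant bit, so ANDing with a mask whose top
    // bit is clear (0x7FFFFFFF in the high word) leaves |x| in the register.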
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  } else {
    // TODO: update when assembler support is available.
    UNIMPLEMENTED(FATAL) << "Needs assembler support.";
//  Once assembler support is available, in-memory operations look like this:
//    if (is64bit) {
//      DCHECK(output.IsDoubleStackSlot());
//      __ andl(Address(Register(ESP), output.GetHighStackIndex(kX86WordSize)),
//              Immediate(0x7FFFFFFF));
//    } else {
//      DCHECK(output.IsStackSlot());
//      // Can use and with a literal directly.
//      __ andl(Address(Register(ESP), output.GetStackIndex()), Immediate(0x7FFFFFFF));
//    }
  }
}

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

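// Branch-free absolute value: with sign = x >> 31 (0 for x >= 0, -1 for x < 0),
// abs(x) == (x ^ sign) - sign. A sketch of the identity in C:
//   int32_t sign = x >> 31;             // arithmetic shift: 0 or -1
//   int32_t abs_x = (x ^ sign) - sign;  // NOT-then-add-one (negate) when negative
// Like Math.abs, this wraps for INT_MIN. cdq below materializes the sign in EDX from EAX.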
static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

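// The same (x ^ sign) - sign identity, widened to 64 bits: the sign of the high word is
// replicated into both output words, and the trailing subtract uses sbbl so the borrow
// out of the low word propagates into the high word.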
static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double,
                        X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if NaN jmp NaN_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // NaN_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

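  // ucomisd/ucomiss set the flags like an unsigned compare: an unordered result (NaN in
  // either operand) sets ZF, PF and CF, so the parity jump below catches NaN first and
  // kAbove/kBelow then select the ordered greater/less outcomes.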
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
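  // Falling through here means the operands compared equal, i.e. they are +0.0/-0.0 (or
  // the same value): OR of the sign bits yields -0.0 for min, AND yields +0.0 for max.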
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (is_double) {
    __ pushl(Immediate(kDoubleNaNHigh));
    __ pushl(Immediate(kDoubleNaNLow));
    __ movsd(out, Address(ESP, 0));
    __ addl(ESP, Immediate(8));
  } else {
    __ pushl(Immediate(kFloatNaN));
    __ movss(out, Address(ESP, 0));
    __ addl(ESP, Immediate(4));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetAssembler());
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    //       a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_low.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
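    // After the full-width subtract, the signed "less" relation (SF != OF) holds exactly
    // when op1 < op2 over all 64 bits, so one conditional move per half selects op2.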
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    // (out := op1)
    // out <=? op2
    // if out is min jmp done
    // out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

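// round_mode below is the SSE4.1 roundsd immediate: 0 = round to nearest (even),
// 1 = round toward negative infinity (floor), 2 = round toward positive infinity (ceil),
// matching the Rint, Floor and Ceil visitors that follow.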
static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

// Note that 32 bit x86 doesn't have the capability to inline MathRoundDouble,
// as it needs 64 bit instructions.
void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  // Implement RoundFloat as t1 = floor(input + 0.5f); convert to int.
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  XmmRegister maxInt = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister inPlusPointFive = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  NearLabel done, nan;
  X86Assembler* assembler = GetAssembler();

  // Generate 0.5 into inPlusPointFive.
  __ movl(out, Immediate(bit_cast<int32_t, float>(0.5f)));
  __ movd(inPlusPointFive, out);

  // Add in the input.
  __ addss(inPlusPointFive, in);

  // And floor the sum to an integer value (roundss with mode 1, round toward -inf).
  __ roundss(inPlusPointFive, inPlusPointFive, Immediate(1));

  __ movl(out, Immediate(kPrimIntMax));
  // maxInt = int-to-float(out)
  __ cvtsi2ss(maxInt, out);

  // if inPlusPointFive >= maxInt goto done
  __ comiss(inPlusPointFive, maxInt);
  __ j(kAboveEqual, &done);

  // if input == NaN goto nan
  __ j(kUnordered, &nan);

  // output = float-to-int-truncate(input)
  __ cvttss2si(out, inPlusPointFive);
  __ jmp(&done);
  __ Bind(&nan);

  // output = 0
  __ xorl(out, out);
  __ Bind(&done);
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena,
                                      HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->WillCall());
  DCHECK(invoke->IsInvokeStaticOrDirect());
  X86Assembler* assembler = codegen->GetAssembler();

  // We need some place to pass the parameters.
  __ subl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(16);

  // Pass the parameters at the bottom of the stack.
  __ movsd(Address(ESP, 0), XMM0);

  // If we have a second parameter, pass it next.
  if (invoke->GetNumberOfArguments() == 2) {
    __ movsd(Address(ESP, 8), XMM1);
  }

  // Now do the actual call.
  __ fs()->call(Address::Absolute(GetThreadOffset<kX86WordSize>(entry)));

  // Extract the return value from the FP stack.
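  // Under the x86-32 native calling convention the math routines return in x87 st(0);
  // fstpl pops it to memory so the result can be reloaded into XMM0.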
  __ fstpl(Address(ESP, 0));
  __ movsd(XMM0, Address(ESP, 0));

  // And clean up the stack.
  __ addl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(-16);

  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena,
                                        HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderX86::VisitStringCharAt(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  X86Assembler* assembler = GetAssembler();

  __ cmpl(idx, Address(obj, count_offset));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ j(kAboveEqual, slow_path->GetEntryLabel());

  // out = out[2*idx].
  __ movzxw(out, Address(out, idx, ScaleFactor::TIMES_2, value_offset));

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Register length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      // Check that length(input) >= length.
      __ cmpl(Address(input, length_offset), length);
      __ j(kLess, slow_path->GetEntryLabel());
    } else {
      // Check that length(input) >= pos.
      __ movl(input_len, Address(input, length_offset));
      __ cmpl(input_len, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      __ leal(temp, Address(input_len, -pos_const));
      __ cmpl(temp, length);
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    __ cmpl(temp, length);
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    // testl sets SF from the value itself; comparing the register against itself
    // would always clear SF/OF, so the kLess branch could never be taken.
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, srcPos, src, count, slow_path, src_base, dest_base);

  // Validity checks: dest.
  CheckPosition(assembler, destPos, dest, count, slow_path, src_base, dest_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();

    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
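  // With the direction flag clear, rep movsw copies ECX 16-bit units from [ESI] to [EDI],
  // advancing both pointers; the src == dest bail-out above rules out overlap.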
  __ rep_movsw();

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ testl(argument, argument);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pStringCompareTo)));
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
  locations->AddTemp(Location::RegisterLocation(ECX));
  locations->AddTemp(Location::RegisterLocation(EDI));

  // Set output, ESI needed for repe_cmpsl instruction anyways.
  locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register ecx = locations->GetTemp(0).AsRegister<Register>();
  Register edi = locations->GetTemp(1).AsRegister<Register>();
  Register esi = locations->Out().AsRegister<Register>();

  NearLabel end, return_true, return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ testl(arg, arg);
    __ j(kEqual, &return_false);
  }

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  if (!optimizations.GetArgumentIsString()) {
    __ movl(ecx, Address(str, class_offset));
    __ cmpl(ecx, Address(arg, class_offset));
    __ j(kNotEqual, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ cmpl(str, arg);
  __ j(kEqual, &return_true);

  // Load length of receiver string.
  __ movl(ecx, Address(str, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmpl(ecx, Address(arg, count_offset));
  __ j(kNotEqual, &return_false);
  // Return true if both strings are empty.
  __ jecxz(&return_true);

  // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
  __ leal(esi, Address(str, value_offset));
  __ leal(edi, Address(arg, value_offset));

  // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
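  // ecx = (length + 1) / 2: each repe_cmpsl step compares one 32-bit word, i.e. two chars.
  // For odd lengths this examines one padding char past the data, which the alignment
  // static_assert below relies on being zero on both sides.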
  __ addl(ecx, Immediate(1));
  __ shrl(ecx, Immediate(1));

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings two characters at a time starting at the beginning of the string.
  __ repe_cmpsl();
  // If strings are not equal, zero flag will be cleared.
  __ j(kNotEqual, &return_false);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ movl(esi, Immediate(1));
  __ jmp(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ xorl(esi, esi);
  __ Bind(&end);
}

static void CreateStringIndexOfLocations(HInvoke* invoke,
                                         ArenaAllocator* allocator,
                                         bool start_at_zero) {
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // The data needs to be in EDI for scasw. So request that the string is there, anyways.
  locations->SetInAt(0, Location::RegisterLocation(EDI));
  // If we look for a constant char, we'll still have to copy it into EAX. So just request the
  // allocator to do that, anyways. We can still do the constant check by checking the parameter
  // of the instruction explicitly.
  // Note: This works as we don't clobber EAX anywhere.
  locations->SetInAt(1, Location::RegisterLocation(EAX));
  if (!start_at_zero) {
    locations->SetInAt(2, Location::RequiresRegister());  // The starting index.
  }
  // As we clobber EDI during execution anyways, also use it as the output.
  locations->SetOut(Location::SameAsFirstInput());

  // repne scasw uses ECX as the counter.
  locations->AddTemp(Location::RegisterLocation(ECX));
  // Need another temporary to be able to compute the result.
  locations->AddTemp(Location::RequiresRegister());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  X86Assembler* assembler,
                                  CodeGeneratorX86* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register string_obj = locations->InAt(0).AsRegister<Register>();
  Register search_value = locations->InAt(1).AsRegister<Register>();
  Register counter = locations->GetTemp(0).AsRegister<Register>();
  Register string_length = locations->GetTemp(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // Check our assumptions for registers.
  DCHECK_EQ(string_obj, EDI);
  DCHECK_EQ(search_value, EAX);
  DCHECK_EQ(counter, ECX);
  DCHECK_EQ(out, EDI);

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCode* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
      codegen->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
    slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
    codegen->AddSlowPath(slow_path);
    __ j(kAbove, slow_path->GetEntryLabel());
  }

  // From here down, we know that we are looking for a char that fits in 16 bits.
  // Location of reference to data array within the String object.
  int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count within the String object.
  int32_t count_offset = mirror::String::CountOffset().Int32Value();

  // Load string length, i.e., the count field of the string.
  __ movl(string_length, Address(string_obj, count_offset));

  // Do a zero-length check.
  // TODO: Support jecxz.
  NearLabel not_found_label;
  __ testl(string_length, string_length);
  __ j(kEqual, &not_found_label);

  if (start_at_zero) {
    // Number of chars to scan is the same as the string length.
    __ movl(counter, string_length);

    // Move to the start of the string.
    __ addl(string_obj, Immediate(value_offset));
  } else {
    Register start_index = locations->InAt(2).AsRegister<Register>();

    // Do a start_index check.
    __ cmpl(start_index, string_length);
    __ j(kGreaterEqual, &not_found_label);

    // Ensure we have a start index >= 0.
    __ xorl(counter, counter);
    __ cmpl(start_index, Immediate(0));
    __ cmovl(kGreater, counter, start_index);

    // Move to the start of the string: string_obj + value_offset + 2 * start_index.
    __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));

    // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
    // compare.
    __ negl(counter);
    __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
  }

  // Everything is set up for repne scasw:
  //   * Comparison address in EDI.
  //   * Counter in ECX.
  __ repne_scasw();

  // Did we find a match?
  __ j(kNotEqual, &not_found_label);

  // Yes, we matched. Compute the index of the result.
1439 __ subl(string_length, counter);
1440 __ leal(out, Address(string_length, -1));
1441
Mark Mendell0c9497d2015-08-21 09:30:05 -04001442 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001443 __ jmp(&done);
1444
1445 // Failed to match; return -1.
1446 __ Bind(&not_found_label);
1447 __ movl(out, Immediate(-1));
1448
1449 // And join up at the end.
1450 __ Bind(&done);
1451 if (slow_path != nullptr) {
1452 __ Bind(slow_path->GetExitLabel());
1453 }
1454}
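
// For reference, the repne scasw sequence above implements the moral equivalent of the
// following loop (an illustrative sketch, not code that is compiled here; names are
// invented for exposition):
//
//   int32_t IndexOf(const uint16_t* chars /* EDI */, int32_t count /* ECX */,
//                   uint16_t value /* AX */) {
//     for (int32_t i = 0; i < count; ++i) {
//       if (chars[i] == value) {
//         return i;  // Recovered above as string_length - remaining_count - 1.
//       }
//     }
//     return -1;
//   }
//
// repne scasw compares AX with the word at [EDI], advances EDI by two and decrements ECX
// after each comparison, and stops on a match (ZF set) or when ECX reaches zero.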

void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
}

void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(
      invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ testl(byte_array, byte_array);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromBytes)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromChars)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ testl(string_to_copy, string_to_copy);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromString)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  // Place srcEnd in ECX to save a move below.
  locations->SetInAt(2, Location::RegisterLocation(ECX));
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // We also need some temporaries: REP MOVSW requires fixed registers. We don't have
  // enough free registers to also reserve ECX as a temp, so it is saved and restored
  // manually below.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
}

void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location srcBegin = locations->InAt(1);
  int srcBegin_value =
      srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dst = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  // Check the assumption that sizeof(Char) is 2 (used in the scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Compute the address of the destination buffer.
  __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));

  // Compute the address of the source string.
  if (srcBegin.IsConstant()) {
    // Compute the address of the source string by adding the number of chars from
    // the source beginning to the value offset of a string.
    __ leal(ESI, Address(obj, srcBegin_value * char_size + value_offset));
  } else {
    __ leal(ESI, Address(obj, srcBegin.AsRegister<Register>(),
                         ScaleFactor::TIMES_2, value_offset));
  }

  // Compute the number of chars (words) to move.
  // Now is the time to save ECX, since we don't know if it will be used later.
  __ pushl(ECX);
  int stack_adjust = kX86WordSize;
  __ cfi().AdjustCFAOffset(stack_adjust);
  DCHECK_EQ(srcEnd, ECX);
  if (srcBegin.IsConstant()) {
    if (srcBegin_value != 0) {
      __ subl(ECX, Immediate(srcBegin_value));
    }
  } else {
    DCHECK(srcBegin.IsRegister());
    __ subl(ECX, srcBegin.AsRegister<Register>());
  }

  // Do the move.
  __ rep_movsw();

  // And restore ECX.
  __ popl(ECX);
  __ cfi().AdjustCFAOffset(-stack_adjust);
}
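
// For reference, with the direction flag clear (as the ABI guarantees), the rep movsw
// above behaves like this illustrative sketch (not code that is compiled here), copying
// srcEnd - srcBegin 16-bit code units:
//
//   while (ecx != 0) {  // ECX holds the remaining number of chars.
//     *reinterpret_cast<uint16_t*>(edi) = *reinterpret_cast<const uint16_t*>(esi);
//     edi += 2;  // Destination, computed from dst + dstBegin above.
//     esi += 2;  // Source, computed from obj + srcBegin above.
//     --ecx;
//   }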

static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location out_loc = locations->Out();
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimShort:
      __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimInt:
      __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimLong:
      __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
      __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
      break;
    default:
      LOG(FATAL) << "Type not recognized for peek: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
                                         HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  HInstruction* value = invoke->InputAt(1);
  if (size == Primitive::kPrimByte) {
    locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(value));
  }
}

static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(1);
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      if (value_loc.IsConstant()) {
        __ movb(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
      }
      break;
    case Primitive::kPrimShort:
      if (value_loc.IsConstant()) {
        __ movw(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movw(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimInt:
      if (value_loc.IsConstant()) {
        __ movl(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimLong:
      if (value_loc.IsConstant()) {
        int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
        __ movl(Address(address, 0), Immediate(Low32Bits(value)));
        __ movl(Address(address, 4), Immediate(High32Bits(value)));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
        __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
      }
      break;
    default:
      LOG(FATAL) << "Type not recognized for poke: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
  GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86WordSize>()));
}

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();
  Location output_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register output = output_loc.AsRegister<Register>();
      __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
      if (type == Primitive::kPrimNot) {
        codegen->MaybeGenerateReadBarrier(invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
      }
      break;
    }

    case Primitive::kPrimLong: {
        Register output_lo = output_loc.AsRegisterPairLow<Register>();
        Register output_hi = output_loc.AsRegisterPairHigh<Register>();
        if (is_volatile) {
          // Need to use an XMM register to read atomically.
          XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
          __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
          __ movd(output_lo, temp);
          __ psrlq(temp, Immediate(32));
          __ movd(output_hi, temp);
        } else {
          __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
          __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
        }
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
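
// The XMM path above exists because a 64-bit Java volatile read must be a single atomic
// load, which a pair of 32-bit movl instructions cannot provide. An illustrative sketch
// of the logic (AtomicLoad64 is an invented name for exposition):
//
//   uint64_t wide = AtomicLoad64(base + offset);      // movsd: one 8-byte memory access.
//   uint32_t lo = static_cast<uint32_t>(wide);        // movd from the XMM register.
//   uint32_t hi = static_cast<uint32_t>(wide >> 32);  // psrlq by 32, then movd.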

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke,
                                          bool is_long, bool is_volatile) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  if (is_long) {
    if (is_volatile) {
      // Need an XMM register to read the volatile long atomically.
      locations->AddTemp(Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
    } else {
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
    }
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}


void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}


static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
                                                       Primitive::Type type,
                                                       HInvoke* invoke,
                                                       bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Ensure the value is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  } else if (type == Primitive::kPrimLong && is_volatile) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
}

// We don't need to distinguish 'ordered' puts here: they require an AnyStore barrier,
// which the x86 memory model already provides.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(3);

  if (type == Primitive::kPrimLong) {
    Register value_lo = value_loc.AsRegisterPairLow<Register>();
    Register value_hi = value_loc.AsRegisterPairHigh<Register>();
    if (is_volatile) {
      XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
      __ movd(temp1, value_lo);
      __ movd(temp2, value_hi);
      __ punpckldq(temp1, temp2);
      __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
    } else {
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
    }
  } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    __ movl(temp, value_loc.AsRegister<Register>());
    __ PoisonHeapReference(temp);
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
  } else {
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
  }

  if (is_volatile) {
    __ mfence();
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value_loc.AsRegister<Register>(),
                        value_can_be_null);
  }
}
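
// The volatile long store above is the mirror image of the volatile load: punpckldq
// interleaves the two 32-bit halves into one XMM register so that movsd can perform a
// single atomic 8-byte store. An illustrative sketch (AtomicStore64 is an invented name
// for exposition):
//
//   uint64_t wide = (static_cast<uint64_t>(value_hi) << 32) | value_lo;  // movd x2 + punpckldq.
//   AtomicStore64(base + offset, wide);                                  // movsd.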

void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
                                       HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  // The offset is a long, but in 32-bit mode we only need the low word.
  // TODO: Can we update the invoke here to remove a TypeConvert to Long?
  locations->SetInAt(2, Location::RequiresRegister());
  // The expected value must be in EAX or EDX:EAX.
  // For a long, the new value must be in ECX:EBX.
  if (type == Primitive::kPrimLong) {
    locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
    locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
  } else {
    locations->SetInAt(3, Location::RegisterLocation(EAX));
    locations->SetInAt(4, Location::RequiresRegister());
  }

  // Force a byte register for the output.
  locations->SetOut(Location::RegisterLocation(EAX));
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Need a byte register for marking.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}
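
// For reference, the fixed registers requested above mirror the hardware contract of the
// x86 compare-and-swap instructions (an illustrative summary, not code from this file):
//   cmpxchg r/m32, r32  : expected value in EAX; on failure EAX receives the current value.
//   cmpxchg8b m64       : expected value in EDX:EAX; new value in ECX:EBX.
// Both set ZF on success, which is why the output is forced into a register with a byte
// form (EAX/AL) for the setb below.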

void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
}

static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location out = locations->Out();
  DCHECK_EQ(out.AsRegister<Register>(), EAX);

  if (type == Primitive::kPrimNot) {
    Register expected = locations->InAt(3).AsRegister<Register>();
    // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
    DCHECK_EQ(expected, EAX);
    Register value = locations->InAt(4).AsRegister<Register>();

    // Mark card for object assuming new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value,
                        value_can_be_null);

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = locations->GetTemp(0).AsRegister<Register>();
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);

    // LOCK CMPXCHG has full barrier semantics, and we don't need
    // scheduling barriers at this time.

    // Convert ZF into the boolean result.
    __ setb(kZero, out.AsRegister<Register>());
    __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());

    // In the case of the `UnsafeCASObject` intrinsic, accessing an
    // object in the heap with LOCK CMPXCHG does not require a read
    // barrier, as we do not keep a reference to this heap location.
    // However, if heap poisoning is enabled, we need to unpoison the
    // values that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need to
        // unpoison it.
      } else {
        // Ensure `value` is different from `out`, so that unpoisoning
        // the former does not invalidate the latter.
        DCHECK_NE(value, out.AsRegister<Register>());
        __ UnpoisonHeapReference(value);
      }
      // Do not unpoison the reference contained in register
      // `expected`, as it is the same as register `out` (EAX).
    }
  } else {
    if (type == Primitive::kPrimInt) {
      // Ensure the expected value is in EAX (required by the CMPXCHG
      // instruction).
      DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
      __ LockCmpxchgl(Address(base, offset, TIMES_1, 0),
                      locations->InAt(4).AsRegister<Register>());
    } else if (type == Primitive::kPrimLong) {
      // Ensure the expected value is in EAX:EDX and that the new
      // value is in EBX:ECX (required by the CMPXCHG8B instruction).
      DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
      DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
      DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
      DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
      __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
    } else {
      LOG(FATAL) << "Unexpected CAS type " << type;
    }

    // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
    // don't need scheduling barriers at this time.

    // Convert ZF into the boolean result.
    __ setb(kZero, out.AsRegister<Register>());
    __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
  }
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCAS(Primitive::kPrimInt, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCAS(Primitive::kPrimLong, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCAS(Primitive::kPrimNot, invoke, codegen_);
}

2184void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
2185 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2186 LocationSummary::kNoCall,
2187 kIntrinsified);
2188 locations->SetInAt(0, Location::RequiresRegister());
2189 locations->SetOut(Location::SameAsFirstInput());
2190 locations->AddTemp(Location::RequiresRegister());
2191}
2192
2193static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
2194 X86Assembler* assembler) {
2195 Immediate imm_shift(shift);
2196 Immediate imm_mask(mask);
2197 __ movl(temp, reg);
2198 __ shrl(reg, imm_shift);
2199 __ andl(temp, imm_mask);
2200 __ andl(reg, imm_mask);
2201 __ shll(temp, imm_shift);
2202 __ orl(reg, temp);
2203}

void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  /*
   * Reverse the byte order first with a single bswap instruction, then use three rounds
   * of bit swapping to reverse the bits within each byte. Using bswap saves instructions
   * compared to the generic libcore (luni) implementation, which needs five rounds of
   * bit swapping:
   *   x = bswap x
   *   x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
   *   x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
   *   x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
   */
  __ bswapl(reg);
  SwapBits(reg, temp, 1, 0x55555555, assembler);
  SwapBits(reg, temp, 2, 0x33333333, assembler);
  SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
}
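
// A reference version of the reversal above, as an illustrative sketch (not compiled
// here). After bswapl reverses the bytes, the three rounds swap adjacent groups of 1, 2,
// and then 4 bits, which reverses the bits within each byte:
//
//   uint32_t ReverseBits32(uint32_t x) {
//     x = __builtin_bswap32(x);
//     x = ((x & 0x55555555u) << 1) | ((x >> 1) & 0x55555555u);
//     x = ((x & 0x33333333u) << 2) | ((x >> 2) & 0x33333333u);
//     x = ((x & 0x0F0F0F0Fu) << 4) | ((x >> 4) & 0x0F0F0F0Fu);
//     return x;
//   }
//
// For example, 0x80000000 maps to 0x00000001 and 0x00000001 maps to 0x80000000.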

void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // We want to swap the high and low words, bswap each one, and then perform the same
  // three swap rounds as in the 32-bit reverse.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // Bit-reverse the low word.
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // Bit-reverse the high word.
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // If the input was zero, BSR sets ZF and leaves the output undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register pair.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // The high word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // The high word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All-zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
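
// Why the XOR corrections above work: for a non-zero input, BSR returns the index
// i (0..31) of the highest set bit, and CLZ equals 31 - i. Since i <= 31, there is no
// borrow and 31 - i == i ^ 31. The high-word-zero path needs CLZ(low) + 32, i.e.,
// (31 - i) + 32 = 63 - i == i ^ 63, by the same argument. For example, an input of 1
// gives i = 0, so the 32-bit path produces 0 ^ 31 = 31, as expected for CLZ(1).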

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsfl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsfl(out, Address(ESP, src.GetStackIndex()));
    }

    // If the input was zero, BSF sets ZF and leaves the output undefined.
    NearLabel done;
    __ j(kNotEqual, &done);

    // Fix the zero case with the expected result.
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register pair.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel done, all_zeroes;

  // If the low word is zero, then ZF will be set. If not, we have the answer.
  __ bsfl(out, src_lo);
  __ j(kNotEqual, &done);

  // The low word was zero. We have to compute the high word count and add 32.
  __ bsfl(out, src_hi);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Add 32 to account for the low word being zero.
  __ addl(out, Immediate(32));
  __ jmp(&done);

  // All-zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
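
// Unlike BSR, BSF needs no arithmetic correction: for a non-zero input it directly
// returns the index of the lowest set bit, which is exactly the number of trailing
// zeros. Only the all-zero answers (32 and 64) and the low-word-zero adjustment (+32)
// are patched in above. For example, an input of 0x8000 gives BSF = 15 = CTZ(0x8000).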

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateRotateLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The shift count needs to be in CL or a constant.
  locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, invoke->InputAt(1)));
  locations->SetOut(Location::SameAsFirstInput());
}

static void GenRotate(X86Assembler* assembler, HInvoke* invoke, bool is_left) {
  LocationSummary* locations = invoke->GetLocations();
  Register first_reg = locations->InAt(0).AsRegister<Register>();
  Location second = locations->InAt(1);

  if (second.IsRegister()) {
    Register second_reg = second.AsRegister<Register>();
    if (is_left) {
      __ roll(first_reg, second_reg);
    } else {
      __ rorl(first_reg, second_reg);
    }
  } else {
    Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
    if (is_left) {
      __ roll(first_reg, imm);
    } else {
      __ rorl(first_reg, imm);
    }
  }
}
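
// A rotate-left by k is equivalent to the illustrative sketch below (not compiled here);
// the roll instruction performs it in one step, and the masking mirrors the
// kMaxIntShiftValue masking of constant shift counts above:
//
//   uint32_t Rotl32(uint32_t x, uint32_t k) {
//     k &= 31;
//     return (x << k) | (x >> ((32 - k) & 31));  // The & 31 avoids a shift by 32.
//   }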

void IntrinsicLocationsBuilderX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ true);
}

void IntrinsicLocationsBuilderX86::VisitIntegerRotateRight(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateRight(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ false);
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace x86
}  // namespace art