/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int kFloatNaN = 0x7FC00000;

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
  : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

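// Shorthand used throughout this file: `__` expands to `assembler->`, so emitted
// instructions read like assembly, e.g. `__ movl(out, in)`.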
#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

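// The bit-transfer intrinsics (Double.doubleToRawLongBits and friends) are pure register
// moves between the XMM and general-purpose files. A rough C sketch of the 64-bit path
// below, for orientation (not the emitted code itself):
//   uint64_t bits = double_input;        // movsd into a temp
//   out_lo = (uint32_t) bits;            // movd
//   out_hi = (uint32_t)(bits >> 32);     // psrlq by 32, then movd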
static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

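// Byte reversal: bswap flips all four bytes of a 32-bit register. For a 16-bit value the
// two interesting bytes end up in the upper half, so an arithmetic shift right by 16
// brings them back down while sign-extending, which is what the short variant needs.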
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
// need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // TODO: Allow x86 to work with memory. This requires assembler support, see below.
  // locations->SetInAt(0, Location::Any());  // X86 can work on memory directly.
  locations->SetOut(Location::SameAsFirstInput());
}

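// FP abs only needs the IEEE sign bit cleared. SSE has no immediate operands, so the
// mask (0x7FFFFFFFFFFFFFFF for doubles, 0x7FFFFFFF for floats) is materialized on the
// stack and combined with andpd/andps; the sub/push pairs adjust ESP by 16 in total,
// preserving stack alignment, and the final add pops all 16 bytes again.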
static void MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location output = locations->Out();

  if (output.IsFpuRegister()) {
    // Create the right constant on an aligned stack.
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  } else {
    // TODO: update when assembler support is available.
    UNIMPLEMENTED(FATAL) << "Needs assembler support.";
//  Once assembler support is available, in-memory operations look like this:
//  if (is64bit) {
//    DCHECK(output.IsDoubleStackSlot());
//    __ andl(Address(Register(ESP), output.GetHighStackIndex(kX86WordSize)),
//            Immediate(0x7FFFFFFF));
//  } else {
//    DCHECK(output.IsStackSlot());
//    // Can use and with a literal directly.
//    __ andl(Address(Register(ESP), output.GetStackIndex()), Immediate(0x7FFFFFFF));
//  }
  }
}

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

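// Branch-free integer abs, the classic sign-mask trick. Equivalent C sketch:
//   int sign = x >> 31;        // cdq: 0 or -1, spread across EDX
//   return (x ^ sign) - sign;  // xorl + subl
// XOR with 0/-1 conditionally complements the value, and subtracting 0/-1 adds the +1
// that two's-complement negation needs.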
static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

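// The same sign-mask trick extended to a 64-bit register pair: the sign is taken from
// the high word, applied to both halves, and the final subtract uses subl/sbbl so the
// borrow propagates from the low half into the high half.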
static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

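// Floating-point min/max cannot be a bare comparison: NaN inputs must produce NaN, and
// min(+0.0, -0.0) must be -0.0 even though the values compare equal. The code below
// handles both cases explicitly, as outlined in the pseudo-code comment inside.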
static void GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double,
                        X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if Nan jmp Nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  // Nan_label:
  //  out := NaN
  // op2_label:
  //  out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
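  // The operands compared equal here, so they can differ only in sign. OR keeps a set
  // sign bit, making min(+0.0, -0.0) produce -0.0; AND clears it, making max produce +0.0.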
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (is_double) {
    __ pushl(Immediate(kDoubleNaNHigh));
    __ pushl(Immediate(kDoubleNaNLow));
    __ movsd(out, Address(ESP, 0));
    __ addl(ESP, Immediate(8));
  } else {
    __ pushl(Immediate(kFloatNaN));
    __ movss(out, Address(ESP, 0));
    __ addl(ESP, Immediate(4));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

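// Integer min/max via conditional move. For longs, x86-32 has no 64-bit compare, so the
// code performs a full subtract of the register pair (subl, then sbbl for the borrow)
// purely to set the flags, discards the difference, and then cmov-selects each half.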
static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_lo.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    //  (out := op1)
    //  out <=? op2
    //  if out is min jmp done
    //  out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

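// The SSE4.1 roundsd immediate selects the rounding mode: 0 rounds to nearest (even),
// 1 rounds toward negative infinity (floor), 2 toward positive infinity (ceil). The
// callers below pass 0/1/2 for Math.rint, Math.floor, and Math.ceil respectively.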
static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

// Note that 32 bit x86 doesn't have the capability to inline MathRoundDouble,
// as it needs 64 bit instructions.
void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

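// Math.round(float) is implemented as (int) floor(in + 0.5f), with two fix-ups to match
// the Java spec: results at or above Integer.MAX_VALUE clamp to it (the comiss/jae path
// below, which leaves kPrimIntMax in the output), and NaN produces 0.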
void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  // Implement RoundFloat as t1 = floor(input + 0.5f); convert to int.
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  XmmRegister maxInt = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister inPlusPointFive = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  NearLabel done, nan;
  X86Assembler* assembler = GetAssembler();

  // Generate 0.5 into inPlusPointFive.
  __ movl(out, Immediate(bit_cast<int32_t, float>(0.5f)));
  __ movd(inPlusPointFive, out);

  // Add in the input.
  __ addss(inPlusPointFive, in);

  // And floor the result (roundss mode 1 rounds toward negative infinity).
  __ roundss(inPlusPointFive, inPlusPointFive, Immediate(1));

  __ movl(out, Immediate(kPrimIntMax));
  // maxInt = int-to-float(out)
  __ cvtsi2ss(maxInt, out);

  // if inPlusPointFive >= maxInt goto done
  __ comiss(inPlusPointFive, maxInt);
  __ j(kAboveEqual, &done);

  // if input == NaN goto nan
  __ j(kUnordered, &nan);

  // output = float-to-int-truncate(input)
  __ cvttss2si(out, inPlusPointFive);
  __ jmp(&done);
  __ Bind(&nan);

  // output = 0
  __ xorl(out, out);
  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  // the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  // we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  X86Assembler* assembler = GetAssembler();

  __ cmpl(idx, Address(obj, count_offset));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ j(kAboveEqual, slow_path->GetEntryLabel());

  // out = out[2*idx].
  __ movzxw(out, Address(out, idx, ScaleFactor::TIMES_2, value_offset));

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Register length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      // Check that length(input) >= length.
      __ cmpl(Address(input, length_offset), length);
      __ j(kLess, slow_path->GetEntryLabel());
    } else {
      // Check that length(input) >= pos.
      __ movl(input_len, Address(input, length_offset));
      __ cmpl(input_len, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      __ leal(temp, Address(input_len, -pos_const));
      __ cmpl(temp, length);
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    __ cmpl(temp, length);
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

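// The copy itself is a single rep movsw: ESI and EDI are pointed at the first source and
// destination char, ECX holds the number of 16-bit chars, and the instruction copies and
// advances until ECX reaches zero. Everything before it is guard code that funnels the
// awkward cases (overlap, nulls, bad ranges) to the slow path.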
void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    // Note: test (not a self-compare), so the sign flag reflects the value.
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, srcPos, src, count, slow_path, src_base, dest_base);

  // Validity checks: dest.
  CheckPosition(assembler, destPos, dest, count, slow_path, src_base, dest_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();

    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
  __ rep_movsw();

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ testl(argument, argument);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pStringCompareTo)));
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
  locations->AddTemp(Location::RegisterLocation(ECX));
  locations->AddTemp(Location::RegisterLocation(EDI));

  // Set output, ESI needed for repe_cmpsl instruction anyway.
  locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register ecx = locations->GetTemp(0).AsRegister<Register>();
  Register edi = locations->GetTemp(1).AsRegister<Register>();
  Register esi = locations->Out().AsRegister<Register>();

  NearLabel end, return_true, return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ testl(arg, arg);
  __ j(kEqual, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ movl(ecx, Address(str, class_offset));
  __ cmpl(ecx, Address(arg, class_offset));
  __ j(kNotEqual, &return_false);

  // Reference equality check, return true if same reference.
  __ cmpl(str, arg);
  __ j(kEqual, &return_true);

  // Load length of receiver string.
  __ movl(ecx, Address(str, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmpl(ecx, Address(arg, count_offset));
  __ j(kNotEqual, &return_false);
  // Return true if both strings are empty.
  __ jecxz(&return_true);

  // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
  __ leal(esi, Address(str, value_offset));
  __ leal(edi, Address(arg, value_offset));

  // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths;
  // rounding up means the zero padding after an odd-length string is compared as well.
  __ addl(ecx, Immediate(1));
  __ shrl(ecx, Immediate(1));

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings two characters at a time starting at the beginning of the string.
  __ repe_cmpsl();
  // If strings are not equal, zero flag will be cleared.
  __ j(kNotEqual, &return_false);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ movl(esi, Immediate(1));
  __ jmp(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ xorl(esi, esi);
  __ Bind(&end);
}

static void CreateStringIndexOfLocations(HInvoke* invoke,
                                         ArenaAllocator* allocator,
                                         bool start_at_zero) {
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // The data needs to be in EDI for scasw. So request that the string is there, anyway.
  locations->SetInAt(0, Location::RegisterLocation(EDI));
  // If we look for a constant char, we'll still have to copy it into EAX. So just request the
  // allocator to do that, anyway. We can still do the constant check by checking the parameter
  // of the instruction explicitly.
  // Note: This works as we don't clobber EAX anywhere.
  locations->SetInAt(1, Location::RegisterLocation(EAX));
  if (!start_at_zero) {
    locations->SetInAt(2, Location::RequiresRegister());  // The starting index.
  }
  // As we clobber EDI during execution anyway, also use it as the output.
  locations->SetOut(Location::SameAsFirstInput());

  // repne scasw uses ECX as the counter.
  locations->AddTemp(Location::RegisterLocation(ECX));
  // Need another temporary to be able to compute the result.
  locations->AddTemp(Location::RequiresRegister());
}

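// String.indexOf of a single char maps onto repne scasw: EAX holds the char, EDI the
// cursor, ECX the remaining count. The scan stops on a match or when ECX hits zero;
// since ECX counts down, the match index is length - remaining - 1, which is exactly
// what the subl/leal pair after the scan computes.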
static void GenerateStringIndexOf(HInvoke* invoke,
                                  X86Assembler* assembler,
                                  CodeGeneratorX86* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register string_obj = locations->InAt(0).AsRegister<Register>();
  Register search_value = locations->InAt(1).AsRegister<Register>();
  Register counter = locations->GetTemp(0).AsRegister<Register>();
  Register string_length = locations->GetTemp(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // Check our assumptions for registers.
  DCHECK_EQ(string_obj, EDI);
  DCHECK_EQ(search_value, EAX);
  DCHECK_EQ(counter, ECX);
  DCHECK_EQ(out, EDI);

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCode* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
      codegen->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
    slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
    codegen->AddSlowPath(slow_path);
    __ j(kAbove, slow_path->GetEntryLabel());
  }

  // From here down, we know that we are looking for a char that fits in 16 bits.
  // Location of reference to data array within the String object.
  int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count within the String object.
  int32_t count_offset = mirror::String::CountOffset().Int32Value();

  // Load string length, i.e., the count field of the string.
  __ movl(string_length, Address(string_obj, count_offset));

  // Do a zero-length check.
  // TODO: Support jecxz.
  NearLabel not_found_label;
  __ testl(string_length, string_length);
  __ j(kEqual, &not_found_label);

  if (start_at_zero) {
    // Number of chars to scan is the same as the string length.
    __ movl(counter, string_length);

    // Move to the start of the string.
    __ addl(string_obj, Immediate(value_offset));
  } else {
    Register start_index = locations->InAt(2).AsRegister<Register>();

    // Do a start_index check.
    __ cmpl(start_index, string_length);
    __ j(kGreaterEqual, &not_found_label);

    // Ensure we have a start index >= 0.
    __ xorl(counter, counter);
    __ cmpl(start_index, Immediate(0));
    __ cmovl(kGreater, counter, start_index);

    // Move to the start of the string: string_obj + value_offset + 2 * start_index.
    __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));

    // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
    // compare.
    __ negl(counter);
    __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
  }

  // Everything is set up for repne scasw:
  // * Comparison address in EDI.
  // * Counter in ECX.
  __ repne_scasw();

  // Did we find a match?
  __ j(kNotEqual, &not_found_label);

  // Yes, we matched. Compute the index of the result.
  __ subl(string_length, counter);
  __ leal(out, Address(string_length, -1));

  NearLabel done;
  __ jmp(&done);

  // Failed to match; return -1.
  __ Bind(&not_found_label);
  __ movl(out, Immediate(-1));

  // And join up at the end.
  __ Bind(&done);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, true);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, false);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ testl(byte_array, byte_array);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromBytes)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromChars)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ testl(string_to_copy, string_to_copy);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromString)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

Mark Mendell09ed1a32015-03-25 08:30:06 -04001331static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1332 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1333 Location out_loc = locations->Out();
1334 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1335 // to avoid a SIGBUS.
1336 switch (size) {
1337 case Primitive::kPrimByte:
1338 __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
1339 break;
1340 case Primitive::kPrimShort:
1341 __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
1342 break;
1343 case Primitive::kPrimInt:
1344 __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
1345 break;
1346 case Primitive::kPrimLong:
      __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
      __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
      break;
    default:
      LOG(FATAL) << "Type not recognized for peek: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
                                         HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  HInstruction* value = invoke->InputAt(1);
  if (size == Primitive::kPrimByte) {
    locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(value));
  }
}

static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(1);
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      if (value_loc.IsConstant()) {
        __ movb(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
      }
      break;
    case Primitive::kPrimShort:
      if (value_loc.IsConstant()) {
        __ movw(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movw(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimInt:
      if (value_loc.IsConstant()) {
        __ movl(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimLong:
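      // Mirrors the peek above: the 64-bit value is written as two 32-bit
      // stores, so the write is not atomic as a pair.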
      if (value_loc.IsConstant()) {
        int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
        __ movl(Address(address, 0), Immediate(Low32Bits(value)));
        __ movl(Address(address, 4), Immediate(High32Bits(value)));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
        __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
      }
      break;
    default:
      LOG(FATAL) << "Type not recognized for poke: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
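  // The fs segment register points at the current Thread on x86, so the managed
  // peer (the java.lang.Thread object) can be loaded fs-relative from a fixed
  // offset.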
  GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86WordSize>()));
}

static void GenUnsafeGet(LocationSummary* locations, Primitive::Type type,
                         bool is_volatile, X86Assembler* assembler) {
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location output = locations->Out();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register output_reg = output.AsRegister<Register>();
      __ movl(output_reg, Address(base, offset, ScaleFactor::TIMES_1, 0));
      if (type == Primitive::kPrimNot) {
        __ MaybeUnpoisonHeapReference(output_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register output_lo = output.AsRegisterPairLow<Register>();
      Register output_hi = output.AsRegisterPairHigh<Register>();
      if (is_volatile) {
        // Need to use a XMM to read atomically.
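        // An aligned SSE 64-bit load is a single memory access, so the two
        // halves cannot tear the way a pair of 32-bit loads could. movd and a
        // 32-bit logical right shift then extract the halves into the pair.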
        XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
        __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
        __ movd(output_lo, temp);
        __ psrlq(temp, Immediate(32));
        __ movd(output_hi, temp);
      } else {
        __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
        __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke,
                                          bool is_long, bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  if (is_long) {
    if (is_volatile) {
      // Need to use XMM to read volatile.
      locations->AddTemp(Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
    } else {
      // The output pair overlaps the inputs: the low half is written before the
      // second load, so it must not be allocated to the base or offset register.
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
    }
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, true, GetAssembler());
}

static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
                                                       Primitive::Type type,
                                                       HInvoke* invoke,
                                                       bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Ensure the value is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  } else if (type == Primitive::kPrimLong && is_volatile) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, true);
}

// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
// memory model.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(3);

  if (type == Primitive::kPrimLong) {
    Register value_lo = value_loc.AsRegisterPairLow<Register>();
    Register value_hi = value_loc.AsRegisterPairHigh<Register>();
    if (is_volatile) {
      XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
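      // Assemble the 64-bit value in an XMM register: punpckldq interleaves the
      // low dwords, leaving temp1 = value_hi:value_lo, which movsd then writes
      // with a single 64-bit store.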
      __ movd(temp1, value_lo);
      __ movd(temp2, value_hi);
      __ punpckldq(temp1, temp2);
      __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
    } else {
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
    }
  } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    __ movl(temp, value_loc.AsRegister<Register>());
    __ PoisonHeapReference(temp);
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
  } else {
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
  }

  if (is_volatile) {
    __ mfence();
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value_loc.AsRegister<Register>(),
                        value_can_be_null);
  }
}

void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
                                       HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  // Offset is a long, but in 32 bit mode, we only need the low word.
  // Can we update the invoke here to remove a TypeConvert to Long?
  locations->SetInAt(2, Location::RequiresRegister());
  // Expected value must be in EAX or EDX:EAX.
  // For long, new value must be in ECX:EBX.
  if (type == Primitive::kPrimLong) {
    locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
    locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
  } else {
    locations->SetInAt(3, Location::RegisterLocation(EAX));
    locations->SetInAt(4, Location::RequiresRegister());
  }

  // Force a byte register for the output.
  locations->SetOut(Location::RegisterLocation(EAX));
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());
    // Need a byte register for marking.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
}

static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location out = locations->Out();
  DCHECK_EQ(out.AsRegister<Register>(), EAX);

  if (type == Primitive::kPrimLong) {
    DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
    DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
    DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
    DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
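    // lock cmpxchg8b compares EDX:EAX with the 64-bit memory operand; if they
    // match it stores ECX:EBX, otherwise it loads the current value into
    // EDX:EAX. Either way ZF reports success, which is converted below.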
    __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
  } else {
    // Integer or object.
    Register expected = locations->InAt(3).AsRegister<Register>();
    DCHECK_EQ(expected, EAX);
    Register value = locations->InAt(4).AsRegister<Register>();
    if (type == Primitive::kPrimNot) {
      // Mark card for object assuming new value is stored.
      bool value_can_be_null = true;  // TODO: Worth finding out this information?
      codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                          locations->GetTemp(1).AsRegister<Register>(),
                          base,
                          value,
                          value_can_be_null);

      if (kPoisonHeapReferences) {
        __ PoisonHeapReference(expected);
        __ PoisonHeapReference(value);
      }
    }

    __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
  }

  // locked cmpxchg has full barrier semantics, and we don't need scheduling
  // barriers at this time.

  // Convert ZF into the boolean result.
  __ setb(kZero, out.AsRegister<Register>());
  __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register value = locations->InAt(4).AsRegister<Register>();
    __ UnpoisonHeapReference(value);
    // Do not unpoison the reference contained in register `expected`,
    // as it is the same as register `out`.
  }
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCAS(Primitive::kPrimInt, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCAS(Primitive::kPrimLong, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCAS(Primitive::kPrimNot, invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
                     X86Assembler* assembler) {
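  // Computes reg = ((reg >> shift) & mask) | ((reg & mask) << shift), swapping
  // the bit groups selected by `mask` with their neighbors `shift` bits away.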
  Immediate imm_shift(shift);
  Immediate imm_mask(mask);
  __ movl(temp, reg);
  __ shrl(reg, imm_shift);
  __ andl(temp, imm_mask);
  __ andl(reg, imm_mask);
  __ shll(temp, imm_shift);
  __ orl(reg, temp);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  /*
   * Use one bswap instruction to reverse byte order first, then use 3 rounds of
   * bit swapping to reverse the bits of x. Using bswap saves instructions
   * compared to the generic luni implementation, which needs 5 rounds of swapping.
   * x = bswap x
   * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
   * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
   * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
   */
  __ bswapl(reg);
  SwapBits(reg, temp, 1, 0x55555555, assembler);
  SwapBits(reg, temp, 2, 0x33333333, assembler);
  SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
}

void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // We want to swap high/low, then bswap each one, and then do the same
  // as a 32 bit reverse.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // bit-reverse low
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // bit-reverse high
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSR sets ZF if the input was zero, and the output is undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
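    // CLZ(x) = 31 - BSR(x), and for 0 <= BSR(x) <= 31, 31 - BSR(x) == BSR(x) ^ 31.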
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // 64 bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // High word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // High word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
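  // For 0 <= BSR(x) <= 31, BSR(x) ^ 63 == 63 - BSR(x) == 32 + (31 - BSR(x)).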
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
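  // BSF returns the index of the lowest set bit, which is exactly the
  // trailing-zero count, so unlike BSR no correction is needed; only the
  // all-zero input requires fixing up.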
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsfl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsfl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSF sets ZF if the input was zero, and the output is undefined.
    NearLabel done;
    __ j(kNotEqual, &done);

    // Fix the zero case with the expected result.
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // 64 bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel done, all_zeroes;

  // If the low word is zero, then ZF will be set. If not, we have the answer.
  __ bsfl(out, src_lo);
  __ j(kNotEqual, &done);

  // Low word was zero. We have to compute the high word count and add 32.
  __ bsfl(out, src_hi);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Add 32 to account for the low word being zero.
  __ addl(out, Immediate(32));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateRotateLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The shift count needs to be in CL or a constant.
  locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, invoke->InputAt(1)));
  locations->SetOut(Location::SameAsFirstInput());
}

static void GenRotate(X86Assembler* assembler, HInvoke* invoke, bool is_left) {
  LocationSummary* locations = invoke->GetLocations();
  Register first_reg = locations->InAt(0).AsRegister<Register>();
  Location second = locations->InAt(1);

  if (second.IsRegister()) {
    Register second_reg = second.AsRegister<Register>();
    if (is_left) {
      __ roll(first_reg, second_reg);
    } else {
      __ rorl(first_reg, second_reg);
    }
  } else {
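    // Mask the rotate distance to five bits. Rotation by 32 is the identity, so
    // this matches Java's rotate semantics as well as the hardware masking of
    // rol/ror.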
    Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
    if (is_left) {
      __ roll(first_reg, imm);
    } else {
      __ rorl(first_reg, imm);
    }
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ true);
}

void IntrinsicLocationsBuilderX86::VisitIntegerRotateRight(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateRight(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ false);
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace x86
}  // namespace art