/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int kFloatNaN = 0x7FC00000;
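
// These are the canonical quiet NaN bit patterns: 0x7FF8000000000000 for double
// (kept as two 32-bit halves, since 32-bit x86 materializes it with two pushes)
// and 0x7FC00000 for float. GenMinMaxFP below loads these when an input is NaN.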

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}
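
// Roughly, the 64-bit paths above implement (little-endian sketch):
//   MoveFPToInt: uint64_t bits = bit_cast<uint64_t>(d); lo = bits; hi = bits >> 32;
//     (movd extracts the low 32-bit lane, psrlq shifts the high word down for the
//     second movd)
//   MoveIntToFP: punpckldq interleaves lo and hi back into one 64-bit lane, which
//     movsd then copies to the destination register.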

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8-bit registers. This is straight from Quick.
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}
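
// Worked example for the kPrimShort path: Short.reverseBytes(0x8001) arrives as the
// sign-extended value 0xFFFF8001; bswapl gives 0x0180FFFF, and the arithmetic shift
// right by 16 yields 0x00000180, i.e. 0x0180 correctly sign-extended.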

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}
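
// Reversing a 64-bit value on a 32-bit target decomposes into swapping the two
// halves and byte-reversing each one: out_lo = bswap(in_hi); out_hi = bswap(in_lo).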

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
//       need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // TODO: Allow x86 to work with memory. This requires assembler support, see below.
  // locations->SetInAt(0, Location::Any());  // X86 can work on memory directly.
  locations->SetOut(Location::SameAsFirstInput());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location output = locations->Out();

  if (output.IsFpuRegister()) {
    // Create the right constant on an aligned stack.
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  } else {
    // TODO: update when assembler support is available.
    UNIMPLEMENTED(FATAL) << "Needs assembler support.";
//  Once assembler support is available, in-memory operations look like this:
//    if (is64bit) {
//      DCHECK(output.IsDoubleStackSlot());
//      __ andl(Address(Register(RSP), output.GetHighStackIndex(kX86WordSize)),
//              Immediate(0x7FFFFFFF));
//    } else {
//      DCHECK(output.IsStackSlot());
//      // Can use and with a literal directly.
//      __ andl(Address(Register(RSP), output.GetStackIndex()), Immediate(0x7FFFFFFF));
//    }
  }
}
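
// The stack constants above are sign-bit masks: the and clears bit 63 (double) or
// bit 31 (float), which computes abs() for any input, including infinities and
// NaNs, with no branches. Note the subl plus the push(es) always adjust ESP by a
// total of 16 bytes, keeping the operand aligned as the first comment says.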

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}
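
// This is the classic branchless abs. With sign = x >> 31 (all ones for negative
// x, all zeros otherwise, produced here by cdq):
//   abs(x) = (x ^ sign) - sign
// For negative x this is (~x) - (-1) = ~x + 1 = -x; for non-negative x both the
// XOR and the subtract are no-ops.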

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}
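
// Same (x ^ sign) - sign identity as the 32-bit case, widened to a register pair:
// the sign mask comes from the high word, the XOR is applied to both halves, and
// subl/sbbl perform the 64-bit subtract with the borrow carried into the high word.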

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double,
                        X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if NaN jmp nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  // nan_label:
  //  out := NaN
  // op2_label:
  //  out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slow path?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (is_double) {
    __ pushl(Immediate(kDoubleNaNHigh));
    __ pushl(Immediate(kDoubleNaNLow));
    __ movsd(out, Address(ESP, 0));
    __ addl(ESP, Immediate(8));
  } else {
    __ pushl(Immediate(kFloatNaN));
    __ movss(out, Address(ESP, 0));
    __ addl(ESP, Immediate(4));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
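
// Why plain bitwise ops suffice for the +/-0.0 case: ucomis* treats the two zeros
// as equal, so both ordered branches fall through only when the operands compare
// equal. OR of the bit patterns then turns a +0.0/-0.0 pair into -0.0 (the min),
// and AND turns it into +0.0 (the max); for identical non-zero values both ops
// leave the value unchanged.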

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    //       a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_lo.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    //  (out := op1)
    //  out <=? op2
    //  if out is min jmp done
    //  out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}
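
// Sketch of the long path: op1 - op2 is computed purely for its flags, as
//   sub temp, op2_lo   (sets the borrow)
//   sbb temp, op2_hi   (consumes it; flags now reflect the signed 64-bit compare)
// and then both halves of op2 are conditionally moved into the output pair under
// the same condition, so the result is always a consistent 64-bit value.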

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}
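
// roundsd's immediate selects the SSE4.1 rounding mode: 0 = round to nearest even
// (used for rint), 1 = round toward negative infinity (floor), 2 = round toward
// positive infinity (ceil), matching the constants the visitors below pass in.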

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

// Note that 32-bit x86 doesn't have the capability to inline MathRoundDouble,
// as it needs 64-bit instructions.
void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  // Implement RoundFloat as t1 = floor(input + 0.5f); convert to int.
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  XmmRegister maxInt = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister inPlusPointFive = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  NearLabel done, nan;
  X86Assembler* assembler = GetAssembler();

  // Generate 0.5 into inPlusPointFive.
  __ movl(out, Immediate(bit_cast<int32_t, float>(0.5f)));
  __ movd(inPlusPointFive, out);

  // Add in the input.
  __ addss(inPlusPointFive, in);

  // And floor the result (roundss with mode 1 rounds toward negative infinity).
  __ roundss(inPlusPointFive, inPlusPointFive, Immediate(1));

  __ movl(out, Immediate(kPrimIntMax));
  // maxInt = int-to-float(out)
  __ cvtsi2ss(maxInt, out);

  // if inPlusPointFive >= maxInt goto done
  __ comiss(inPlusPointFive, maxInt);
  __ j(kAboveEqual, &done);

  // if input == NaN goto nan
  __ j(kUnordered, &nan);

  // output = float-to-int-truncate(input)
  __ cvttss2si(out, inPlusPointFive);
  __ jmp(&done);
  __ Bind(&nan);

  // output = 0
  __ xorl(out, out);
  __ Bind(&done);
}
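
// This follows the round(x) == floor(x + 0.5f) definition: NaN maps to 0, values
// at or above Integer.MAX_VALUE return the kPrimIntMax already sitting in out, and
// very negative values fall through to cvttss2si, whose overflow result 0x80000000
// is exactly Integer.MIN_VALUE. A rough C sketch of the same control flow:
//   int32_t round(float x) {
//     float f = floorf(x + 0.5f);
//     if (f >= (float)INT32_MAX) return INT32_MAX;  // unordered (NaN) is not taken
//     if (isnan(f)) return 0;
//     return (int32_t)f;  // the asm version saturates to INT32_MIN here
//   }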

void IntrinsicLocationsBuilderX86::VisitStringCharAt(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  X86Assembler* assembler = GetAssembler();

  __ cmpl(idx, Address(obj, count_offset));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ j(kAboveEqual, slow_path->GetEntryLabel());

  // out = out[2*idx].
  __ movzxw(out, Address(out, idx, ScaleFactor::TIMES_2, value_offset));

  __ Bind(slow_path->GetExitLabel());
}
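
// Note that the single unsigned compare performs both bounds checks: kAboveEqual
// treats a negative index as a huge unsigned value, so both idx < 0 and
// idx >= count branch to the slow path.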

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Register length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      // Check that length(input) >= length.
      __ cmpl(Address(input, length_offset), length);
      __ j(kLess, slow_path->GetEntryLabel());
    } else {
      // Check that length(input) >= pos.
      __ movl(input_len, Address(input, length_offset));
      __ cmpl(input_len, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      __ leal(temp, Address(input_len, -pos_const));
      __ cmpl(temp, length);
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    __ cmpl(temp, length);
    __ j(kLess, slow_path->GetEntryLabel());
  }
}
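
// CheckPosition enforces, per array, the checks System.arraycopy requires:
//   pos >= 0, pos <= array.length, and array.length - pos >= length,
// branching to the shared slow path (which performs the actual call and throws)
// if any of them fails. A constant position folds the first check away entirely.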

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, srcPos, src, count, slow_path, src_base, dest_base);

  // Validity checks: dest.
  CheckPosition(assembler, destPos, dest, count, slow_path, src_base, dest_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();

    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
  __ rep_movsw();

  __ Bind(slow_path->GetExitLabel());
}
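
// rep movsw copies ECX 16-bit words from [ESI] to [EDI], post-incrementing both
// pointers, which is why the three temporaries are pinned to exactly those
// registers in the LocationsBuilder above.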

void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ testl(argument, argument);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pStringCompareTo)));
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
  locations->AddTemp(Location::RegisterLocation(ECX));
  locations->AddTemp(Location::RegisterLocation(EDI));

  // Set output, ESI needed for repe_cmpsl instruction anyways.
  locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register ecx = locations->GetTemp(0).AsRegister<Register>();
  Register edi = locations->GetTemp(1).AsRegister<Register>();
  Register esi = locations->Out().AsRegister<Register>();

  NearLabel end, return_true, return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ testl(arg, arg);
    __ j(kEqual, &return_false);
  }

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  if (!optimizations.GetArgumentIsString()) {
    __ movl(ecx, Address(str, class_offset));
    __ cmpl(ecx, Address(arg, class_offset));
    __ j(kNotEqual, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ cmpl(str, arg);
  __ j(kEqual, &return_true);

  // Load length of receiver string.
  __ movl(ecx, Address(str, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmpl(ecx, Address(arg, count_offset));
  __ j(kNotEqual, &return_false);
  // Return true if both strings are empty.
  __ jecxz(&return_true);

  // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
  __ leal(esi, Address(str, value_offset));
  __ leal(edi, Address(arg, value_offset));

  // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
  __ addl(ecx, Immediate(1));
  __ shrl(ecx, Immediate(1));

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings two characters at a time starting at the beginning of the string.
  __ repe_cmpsl();
  // If strings are not equal, zero flag will be cleared.
  __ j(kNotEqual, &return_false);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ movl(esi, Immediate(1));
  __ jmp(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ xorl(esi, esi);
  __ Bind(&end);
}
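
// The word-count math above is ceil(length / 2): with 16-bit chars and 4-byte
// aligned string data, (len + 1) >> 1 is the number of 32-bit units repe cmpsl
// must compare. For an odd length, e.g. 5 chars -> 3 words, the trailing padding
// char is zero on both sides per the static_assert, so it compares equal.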

static void CreateStringIndexOfLocations(HInvoke* invoke,
                                         ArenaAllocator* allocator,
                                         bool start_at_zero) {
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // The data needs to be in EDI for scasw. So request that the string is there, anyways.
  locations->SetInAt(0, Location::RegisterLocation(EDI));
  // If we look for a constant char, we'll still have to copy it into EAX. So just request the
  // allocator to do that, anyways. We can still do the constant check by checking the parameter
  // of the instruction explicitly.
  // Note: This works as we don't clobber EAX anywhere.
  locations->SetInAt(1, Location::RegisterLocation(EAX));
  if (!start_at_zero) {
    locations->SetInAt(2, Location::RequiresRegister());  // The starting index.
  }
  // As we clobber EDI during execution anyways, also use it as the output.
  locations->SetOut(Location::SameAsFirstInput());

  // repne scasw uses ECX as the counter.
  locations->AddTemp(Location::RegisterLocation(ECX));
  // Need another temporary to be able to compute the result.
  locations->AddTemp(Location::RequiresRegister());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  X86Assembler* assembler,
                                  CodeGeneratorX86* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register string_obj = locations->InAt(0).AsRegister<Register>();
  Register search_value = locations->InAt(1).AsRegister<Register>();
  Register counter = locations->GetTemp(0).AsRegister<Register>();
  Register string_length = locations->GetTemp(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // Check our assumptions for registers.
  DCHECK_EQ(string_obj, EDI);
  DCHECK_EQ(search_value, EAX);
  DCHECK_EQ(counter, ECX);
  DCHECK_EQ(out, EDI);

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCode* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
      codegen->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
    slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
    codegen->AddSlowPath(slow_path);
    __ j(kAbove, slow_path->GetEntryLabel());
  }

  // From here down, we know that we are looking for a char that fits in 16 bits.
  // Location of reference to data array within the String object.
  int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count within the String object.
  int32_t count_offset = mirror::String::CountOffset().Int32Value();

  // Load string length, i.e., the count field of the string.
  __ movl(string_length, Address(string_obj, count_offset));

  // Do a zero-length check.
  // TODO: Support jecxz.
  NearLabel not_found_label;
  __ testl(string_length, string_length);
  __ j(kEqual, &not_found_label);

  if (start_at_zero) {
    // Number of chars to scan is the same as the string length.
    __ movl(counter, string_length);

    // Move to the start of the string.
    __ addl(string_obj, Immediate(value_offset));
  } else {
    Register start_index = locations->InAt(2).AsRegister<Register>();

    // Do a start_index check.
    __ cmpl(start_index, string_length);
    __ j(kGreaterEqual, &not_found_label);

    // Ensure we have a start index >= 0.
    __ xorl(counter, counter);
    __ cmpl(start_index, Immediate(0));
    __ cmovl(kGreater, counter, start_index);

    // Move to the start of the string: string_obj + value_offset + 2 * start_index.
    __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));

    // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
    // compare.
    __ negl(counter);
    __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
  }

  // Everything is set up for repne scasw:
  //   * Comparison address in EDI.
  //   * Counter in ECX.
  __ repne_scasw();

  // Did we find a match?
  __ j(kNotEqual, &not_found_label);

  // Yes, we matched. Compute the index of the result.
  __ subl(string_length, counter);
  __ leal(out, Address(string_length, -1));

  NearLabel done;
  __ jmp(&done);

  // Failed to match; return -1.
  __ Bind(&not_found_label);
  __ movl(out, Immediate(-1));

  // And join up at the end.
  __ Bind(&done);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}
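
// Index recovery after repne scasw: the scan decrements ECX once per char
// examined, including the matching one, so string_length - counter is the
// one-past-the-match position and the leal subtracts one to get the index. This
// also holds for a nonzero start_index, since counter was initialized to
// string_length - start_index. On a miss, ZF is clear and we take the -1 path.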
1250
1251void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
1252 CreateStringIndexOfLocations(invoke, arena_, true);
1253}
1254
1255void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
1256 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
1257}
1258
1259void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
1260 CreateStringIndexOfLocations(invoke, arena_, false);
1261}
1262
1263void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
1264 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
1265}
1266
Jeff Hao848f70a2014-01-15 13:49:50 -08001267void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1268 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1269 LocationSummary::kCall,
1270 kIntrinsified);
1271 InvokeRuntimeCallingConvention calling_convention;
1272 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1273 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1274 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1275 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1276 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001277}
1278
1279void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1280 X86Assembler* assembler = GetAssembler();
1281 LocationSummary* locations = invoke->GetLocations();
1282
1283 Register byte_array = locations->InAt(0).AsRegister<Register>();
1284 __ testl(byte_array, byte_array);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001285 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001286 codegen_->AddSlowPath(slow_path);
1287 __ j(kEqual, slow_path->GetEntryLabel());
1288
1289 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromBytes)));
1290 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1291 __ Bind(slow_path->GetExitLabel());
1292}
1293
1294void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1295 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1296 LocationSummary::kCall,
1297 kIntrinsified);
1298 InvokeRuntimeCallingConvention calling_convention;
1299 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1300 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1301 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1302 locations->SetOut(Location::RegisterLocation(EAX));
1303}
1304
1305void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1306 X86Assembler* assembler = GetAssembler();
1307
1308 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromChars)));
1309 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1310}
1311
1312void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
1313 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1314 LocationSummary::kCall,
1315 kIntrinsified);
1316 InvokeRuntimeCallingConvention calling_convention;
1317 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1318 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001319}
1320
1321void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
1322 X86Assembler* assembler = GetAssembler();
1323 LocationSummary* locations = invoke->GetLocations();
1324
1325 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1326 __ testl(string_to_copy, string_to_copy);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001327 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001328 codegen_->AddSlowPath(slow_path);
1329 __ j(kEqual, slow_path->GetEntryLabel());
1330
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromString)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  // Place srcEnd in ECX to save a move below.
  locations->SetInAt(2, Location::RegisterLocation(ECX));
  locations->SetInAt(3, Location::RequiresRegister());
  locations->SetInAt(4, Location::RequiresRegister());

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  // We don't have enough registers to also grab ECX, so handle below.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
}

void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
  // Location of data in char array buffer.
  const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
  // Location of char array data in string.
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();

  // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location srcBegin = locations->InAt(1);
  int srcBegin_value =
      srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
  Register srcEnd = locations->InAt(2).AsRegister<Register>();
  Register dst = locations->InAt(3).AsRegister<Register>();
  Register dstBegin = locations->InAt(4).AsRegister<Register>();

  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  // Compute the address of the destination buffer.
  __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));

  // Compute the address of the source string.
  if (srcBegin.IsConstant()) {
    // Compute the address of the source string by adding the number of chars from
    // the source beginning to the value offset of a string.
    __ leal(ESI, Address(obj, srcBegin_value * char_size + value_offset));
  } else {
    __ leal(ESI, Address(obj, srcBegin.AsRegister<Register>(),
                         ScaleFactor::TIMES_2, value_offset));
  }

  // Compute the number of chars (words) to move.
  // Now is the time to save ECX, since we don't know if it will be used later.
  __ pushl(ECX);
  int stack_adjust = kX86WordSize;
  __ cfi().AdjustCFAOffset(stack_adjust);
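  // The CFA adjustment above keeps the unwind information in sync with the
  // push of ECX; the pop below is paired with a matching negative adjustment,
  // so stack unwinding through this intrinsic stays correct.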
  DCHECK_EQ(srcEnd, ECX);
  if (srcBegin.IsConstant()) {
    if (srcBegin_value != 0) {
      __ subl(ECX, Immediate(srcBegin_value));
    }
  } else {
    DCHECK(srcBegin.IsRegister());
    __ subl(ECX, srcBegin.AsRegister<Register>());
  }

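  // REP MOVSW copies ECX 16-bit words from [ESI] to [EDI], advancing both
  // pointers by two bytes per iteration (the direction flag is assumed clear,
  // as required by the calling convention).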
  // Do the move.
  __ rep_movsw();

  // And restore ECX.
  __ popl(ECX);
  __ cfi().AdjustCFAOffset(-stack_adjust);
}

static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location out_loc = locations->Out();
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimShort:
      __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimInt:
      __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimLong:
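      // A 64-bit peek is performed as two 32-bit loads and is therefore not
      // atomic; this is assumed acceptable, as Memory.peekLongNative makes no
      // atomicity guarantee.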
      __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
      __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
      break;
    default:
      LOG(FATAL) << "Type not recognized for peek: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
                                         HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  HInstruction* value = invoke->InputAt(1);
  if (size == Primitive::kPrimByte) {
    locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(value));
  }
}

static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(1);
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      if (value_loc.IsConstant()) {
        __ movb(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
      }
      break;
    case Primitive::kPrimShort:
      if (value_loc.IsConstant()) {
        __ movw(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movw(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimInt:
      if (value_loc.IsConstant()) {
        __ movl(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimLong:
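      // As with GenPeek above, the 64-bit poke is split into two 32-bit
      // stores and is therefore not atomic.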
      if (value_loc.IsConstant()) {
        int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
        __ movl(Address(address, 0), Immediate(Low32Bits(value)));
        __ movl(Address(address, 4), Immediate(High32Bits(value)));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
        __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
      }
      break;
    default:
      LOG(FATAL) << "Type not recognized for poke: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
  GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86WordSize>()));
}

static void GenUnsafeGet(LocationSummary* locations, Primitive::Type type,
                         bool is_volatile, X86Assembler* assembler) {
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location output = locations->Out();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register output_reg = output.AsRegister<Register>();
      __ movl(output_reg, Address(base, offset, ScaleFactor::TIMES_1, 0));
      if (type == Primitive::kPrimNot) {
        __ MaybeUnpoisonHeapReference(output_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register output_lo = output.AsRegisterPairLow<Register>();
      Register output_hi = output.AsRegisterPairHigh<Register>();
      if (is_volatile) {
        // Need to use a XMM to read atomically.
        XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
        __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
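        // A single 8-byte MOVSD load is atomic on x86 for naturally aligned
        // data (the assumption this relies on). Extract the halves with MOVD
        // for the low word, then a 32-bit logical right shift and MOVD again
        // for the high word.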
        __ movd(output_lo, temp);
        __ psrlq(temp, Immediate(32));
        __ movd(output_hi, temp);
      } else {
        __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
        __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke,
                                          bool is_long, bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  if (is_long) {
    if (is_volatile) {
      // Need to use XMM to read volatile.
      locations->AddTemp(Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
    } else {
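      // The output pair is written one word at a time while the base and
      // offset inputs are still live, so it must not share registers with them.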
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
    }
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, true, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, true, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, true);
}


void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, true, GetAssembler());
}


static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
                                                       Primitive::Type type,
                                                       HInvoke* invoke,
                                                       bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Ensure the value is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  } else if (type == Primitive::kPrimLong && is_volatile) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, true);
}

// Ordered writes need nothing special here: they require an AnyStore barrier,
// which the x86 memory model already provides.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(3);

  if (type == Primitive::kPrimLong) {
    Register value_lo = value_loc.AsRegisterPairLow<Register>();
    Register value_hi = value_loc.AsRegisterPairHigh<Register>();
    if (is_volatile) {
      XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
      __ movd(temp1, value_lo);
      __ movd(temp2, value_hi);
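      // PUNPCKLDQ interleaves the low doublewords: temp1's low 64 bits become
      // value_hi:value_lo, which MOVSD can then store as a single 8-byte write
      // (atomic for naturally aligned addresses).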
      __ punpckldq(temp1, temp2);
      __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
    } else {
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
    }
  } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    __ movl(temp, value_loc.AsRegister<Register>());
    __ PoisonHeapReference(temp);
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
  } else {
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
  }

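  // A volatile store needs a StoreLoad barrier after it; on x86 only MFENCE
  // (or a LOCK-prefixed instruction) orders stores against subsequent loads.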
  if (is_volatile) {
    __ mfence();
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value_loc.AsRegister<Register>(),
                        value_can_be_null);
  }
}

void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
                                       HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  // Offset is a long, but in 32 bit mode, we only need the low word.
  // Can we update the invoke here to remove a TypeConvert to Long?
  locations->SetInAt(2, Location::RequiresRegister());
  // Expected value must be in EAX or EDX:EAX.
  // For long, new value must be in ECX:EBX.
  if (type == Primitive::kPrimLong) {
    locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
    locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
  } else {
    locations->SetInAt(3, Location::RegisterLocation(EAX));
    locations->SetInAt(4, Location::RequiresRegister());
  }

  // Force a byte register for the output.
  locations->SetOut(Location::RegisterLocation(EAX));
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());
    // Need a byte register for marking.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
  // The UnsafeCASObject intrinsic does not always work when heap
  // poisoning is enabled (it breaks several libcore tests); turn it
  // off temporarily as a quick fix.
  // TODO(rpl): Fix it and turn it back on.
  if (kPoisonHeapReferences) {
    return;
  }

  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
}

static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location out = locations->Out();
  DCHECK_EQ(out.AsRegister<Register>(), EAX);

  if (type == Primitive::kPrimLong) {
    DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
    DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
    DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
    DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
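    // LOCK CMPXCHG8B compares EDX:EAX with the 8-byte memory operand: if they
    // are equal it stores ECX:EBX and sets ZF, otherwise it loads the current
    // value into EDX:EAX and clears ZF. That is exactly the fixed register
    // assignment checked above.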
    __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
  } else {
    // Integer or object.
    Register expected = locations->InAt(3).AsRegister<Register>();
    DCHECK_EQ(expected, EAX);
    Register value = locations->InAt(4).AsRegister<Register>();
    if (type == Primitive::kPrimNot) {
      // Mark card for object assuming new value is stored.
      bool value_can_be_null = true;  // TODO: Worth finding out this information?
      codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                          locations->GetTemp(1).AsRegister<Register>(),
                          base,
                          value,
                          value_can_be_null);

      if (kPoisonHeapReferences) {
        __ PoisonHeapReference(expected);
        __ PoisonHeapReference(value);
      }
    }

    __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
  }

  // locked cmpxchg has full barrier semantics, and we don't need scheduling
  // barriers at this time.

  // Convert ZF into the boolean result.
  __ setb(kZero, out.AsRegister<Register>());
  __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register value = locations->InAt(4).AsRegister<Register>();
    __ UnpoisonHeapReference(value);
    // Do not unpoison the reference contained in register `expected`,
    // as it is the same as register `out`.
  }
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCAS(Primitive::kPrimInt, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCAS(Primitive::kPrimLong, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCAS(Primitive::kPrimNot, invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
                     X86Assembler* assembler) {
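  // Computes reg = ((reg >> shift) & mask) | ((reg & mask) << shift).
  // For example, shift = 1 with mask = 0x55555555 swaps every pair of
  // adjacent bits: ...ab cd -> ...ba dc.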
  Immediate imm_shift(shift);
  Immediate imm_mask(mask);
  __ movl(temp, reg);
  __ shrl(reg, imm_shift);
  __ andl(temp, imm_mask);
  __ andl(reg, imm_mask);
  __ shll(temp, imm_shift);
  __ orl(reg, temp);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  /*
   * Use one bswap instruction to reverse byte order first and then use 3 rounds of
   * swapping bits to reverse bits in a number x. Using bswap to save instructions
   * compared to generic luni implementation which has 5 rounds of swapping bits.
   * x = bswap x
   * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
   * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
   * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
   */
  __ bswapl(reg);
  SwapBits(reg, temp, 1, 0x55555555, assembler);
  SwapBits(reg, temp, 2, 0x33333333, assembler);
  SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
}

void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // We want to swap high/low, then bswap each one, and then do the same
  // as a 32 bit reverse.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // bit-reverse low
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // bit-reverse high
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSR sets ZF if the input was zero, and the output is undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
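    // (BSR returns the bit index of the most significant set bit; for a
    // nonzero input CLZ = 31 - index, and since 0 <= index <= 31 this is
    // the same as index ^ 31.)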
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64 bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // High word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // High word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
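  // (Here CLZ = 32 + (31 - index) = 63 - index, which for 0 <= index <= 31
  // is the same as index ^ 63.)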
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsfl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsfl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSF sets ZF if the input was zero, and the output is undefined.
    NearLabel done;
    __ j(kNotEqual, &done);

    // Fix the zero case with the expected result.
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64 bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel done, all_zeroes;

  // If the low word is zero, then ZF will be set. If not, we have the answer.
  __ bsfl(out, src_lo);
  __ j(kNotEqual, &done);

  // Low word was zero. We have to compute the high word count and add 32.
  __ bsfl(out, src_hi);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Add 32 to account for the low word being zero.
  __ addl(out, Immediate(32));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateRotateLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The shift count needs to be in CL or a constant.
  locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, invoke->InputAt(1)));
  locations->SetOut(Location::SameAsFirstInput());
}

static void GenRotate(X86Assembler* assembler, HInvoke* invoke, bool is_left) {
  LocationSummary* locations = invoke->GetLocations();
  Register first_reg = locations->InAt(0).AsRegister<Register>();
  Location second = locations->InAt(1);

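  // The ROL/ROR hardware instructions use only the low five bits of CL for a
  // 32-bit operand, which matches Java's rotate semantics of distance & 31
  // (the same masking applied via kMaxIntShiftValue in the constant case below).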
  if (second.IsRegister()) {
    Register second_reg = second.AsRegister<Register>();
    if (is_left) {
      __ roll(first_reg, second_reg);
    } else {
      __ rorl(first_reg, second_reg);
    }
  } else {
    Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
    if (is_left) {
      __ roll(first_reg, imm);
    } else {
      __ rorl(first_reg, imm);
    }
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ true);
}

void IntrinsicLocationsBuilderX86::VisitIntegerRotateRight(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateRight(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ false);
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                 \
void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {  \
}                                                                                     \
void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {     \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace x86
}  // namespace art