/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int kFloatNaN = 0x7FC00000;

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
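// Shorthand used throughout this file: '__' expands to the code generator's
// X86Assembler, so the emitted instructions below read like assembly listings.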

// TODO: target as memory.
static void MoveFromReturnRegister(Location target,
                                   Primitive::Type type,
                                   CodeGeneratorX86* codegen) {
  if (!target.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register target_reg = target.AsRegister<Register>();
      if (target_reg != EAX) {
        __ movl(target_reg, EAX);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register target_reg_lo = target.AsRegisterPairLow<Register>();
      Register target_reg_hi = target.AsRegisterPairHigh<Register>();
      if (target_reg_lo != EAX) {
        __ movl(target_reg_lo, EAX);
      }
      if (target_reg_hi != EDX) {
        __ movl(target_reg_hi, EDX);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected void type for valid location " << target;
      UNREACHABLE();

    case Primitive::kPrimDouble: {
      XmmRegister target_reg = target.AsFpuRegister<XmmRegister>();
      if (target_reg != XMM0) {
        __ movsd(target_reg, XMM0);
      }
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister target_reg = target.AsFpuRegister<XmmRegister>();
      if (target_reg != XMM0) {
        __ movss(target_reg, XMM0);
      }
      break;
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit IntrinsicSlowPathX86(HInvoke* invoke)
      : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorX86* codegen = down_cast<CodeGeneratorX86*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(EAX));
    } else {
      codegen->GenerateVirtualCall(invoke_->AsInvokeVirtual(), Location::RegisterLocation(EAX));
    }
    codegen->RecordPcInfo(invoke_, invoke_->GetDexPc(), this);

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathX86"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathX86);
};

#undef __
#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
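    // doubleToRawLongBits on a register pair: copy the 64-bit pattern into an
    // XMM temp, extract the low word, then logically shift right by 32 to
    // expose the high word. Illustration only, not generated code: for 100.0,
    // whose bits are 0x4059000000000000, this yields lo = 0x00000000 and
    // hi = 0x40590000.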
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
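    // longBitsToDouble: movd places each 32-bit half in the low dword of an
    // XMM temp; punpckldq then interleaves the two low dwords, reassembling
    // the original 64-bit pattern (hi:lo) in temp1. Sketch of the dataflow,
    // as illustration only: temp1 = lo, temp2 = hi, punpckldq -> temp1 = hi:lo.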
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
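      // bswapl reverses all four bytes, leaving the swapped short in the
      // upper half; the arithmetic shift brings it back down sign-extended.
      // E.g. 0x00001234 -> bswapl -> 0x34120000 -> sarl 16 -> 0x00003412.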
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
//       need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // TODO: Allow x86 to work with memory. This requires assembler support, see below.
  // locations->SetInAt(0, Location::Any());               // X86 can work on memory directly.
  locations->SetOut(Location::SameAsFirstInput());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location output = locations->Out();

  if (output.IsFpuRegister()) {
    // Create the right constant on an aligned stack.
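    // The constant is a mask that clears only the IEEE-754 sign bit:
    // 0x7FFFFFFFFFFFFFFF for doubles, 0x7FFFFFFF for floats. The explicit
    // subl plus the pushes add up to a 16-byte adjustment, preserving the
    // stack alignment that the andpd/andps memory operand relies on.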
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  } else {
    // TODO: update when assembler support is available.
    UNIMPLEMENTED(FATAL) << "Needs assembler support.";
//  Once assembler support is available, in-memory operations look like this:
//    if (is64bit) {
//      DCHECK(output.IsDoubleStackSlot());
//      __ andl(Address(Register(RSP), output.GetHighStackIndex(kX86WordSize)),
//              Immediate(0x7FFFFFFF));
//    } else {
//      DCHECK(output.IsStackSlot());
//      // Can use and with a literal directly.
//      __ andl(Address(Register(RSP), output.GetStackIndex()), Immediate(0x7FFFFFFF));
//    }
  }
}

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

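  // Branchless abs: with sign = x >> 31 (all ones if x is negative, zero
  // otherwise), abs(x) == (x ^ sign) - sign. A sketch of what the three
  // instructions below compute, as illustration only:
  //   int32_t sign = x >> 31;      // cdq: sign-extend EAX into EDX
  //   int32_t flip = x ^ sign;     // xorl EAX, EDX
  //   int32_t abs  = flip - sign;  // subl EAX, EDX
  // E.g. x = -5: sign = -1, flip = 4, abs = 4 - (-1) = 5.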
  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

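  // Same (x ^ sign) - sign identity as GenAbsInteger, widened to 64 bits:
  // both halves are XORed with the sign word, and the subtract is done as a
  // subl/sbbl pair so the borrow propagates from the low to the high word.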
  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double,
                        X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if Nan jmp Nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  // Nan_label:
  //  out := NaN
  // op2_label:
  //  out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
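  // At this point the operands compared equal, which for distinct values can
  // only mean +0.0 and -0.0: they compare equal but differ in the sign bit.
  // ORing the bit patterns yields -0.0, the correct min; ANDing yields +0.0,
  // the correct max.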
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (is_double) {
    __ pushl(Immediate(kDoubleNaNHigh));
    __ pushl(Immediate(kDoubleNaNLow));
    __ movsd(out, Address(ESP, 0));
    __ addl(ESP, Immediate(8));
  } else {
    __ pushl(Immediate(kFloatNaN));
    __ movss(out, Address(ESP, 0));
    __ addl(ESP, Immediate(4));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input as the output (we could simply swap the inputs).
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    //       a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_low.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
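    // The flags reflect the full 64-bit (op1 - op2). cmovl overwrites the
    // output (which holds op1) with op2 exactly when op2 should win: for
    // min, when op1 >= op2; for max, when op1 < op2.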
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    //  (out := op1)
    //  out <=? op2
    //  if out is min jmp done
    //  out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    MoveFromReturnRegister(out, invoke->GetType(), codegen);
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
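  // roundsd's immediate selects the SSE4.1 rounding mode: bits 1:0 are
  // 0 = nearest (even), 1 = down/floor, 2 = up/ceil, 3 = truncate, and a
  // clear bit 2 means "use the immediate rather than MXCSR". The visitors
  // below pass 0, 1 and 2 for rint, floor and ceil respectively.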
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

// Note that 32 bit x86 doesn't have the capability to inline MathRoundDouble,
// as it needs 64 bit instructions.
void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  // Implement RoundFloat as t1 = floor(input + 0.5f); convert to int.
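  // Per Math.round(float)'s contract: NaN rounds to 0, and values at or above
  // Integer.MAX_VALUE clamp to Integer.MAX_VALUE. The comiss below covers
  // both special cases: kAboveEqual takes the clamp path with out already
  // holding kPrimIntMax, and kUnordered takes the NaN path that zeroes out.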
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  XmmRegister maxInt = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister inPlusPointFive = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  NearLabel done, nan;
  X86Assembler* assembler = GetAssembler();

  // Generate 0.5 into inPlusPointFive.
  __ movl(out, Immediate(bit_cast<int32_t, float>(0.5f)));
  __ movd(inPlusPointFive, out);

  // Add in the input.
  __ addss(inPlusPointFive, in);

  // And floor it to an integral value (roundss mode 1 rounds toward negative infinity).
  __ roundss(inPlusPointFive, inPlusPointFive, Immediate(1));

  __ movl(out, Immediate(kPrimIntMax));
  // maxInt = int-to-float(out)
  __ cvtsi2ss(maxInt, out);

  // if inPlusPointFive >= maxInt goto done
  __ comiss(inPlusPointFive, maxInt);
  __ j(kAboveEqual, &done);

  // if input == NaN goto nan
  __ j(kUnordered, &nan);

  // output = float-to-int-truncate(input)
  __ cvttss2si(out, inPlusPointFive);
  __ jmp(&done);
  __ Bind(&nan);

  // output = 0
  __ xorl(out, out);
  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitStringCharAt(HInvoke* invoke) {
  // The inputs only; the output reuses the first input's register.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  X86Assembler* assembler = GetAssembler();

  __ cmpl(idx, Address(obj, count_offset));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ j(kAboveEqual, slow_path->GetEntryLabel());

  // out = out[2*idx].
  __ movzxw(out, Address(out, idx, ScaleFactor::TIMES_2, value_offset));

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Register length,
                          SlowPathCodeX86* slow_path,
                          Register input_len,
                          Register temp) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      // Check that length(input) >= length.
      __ cmpl(Address(input, length_offset), length);
      __ j(kLess, slow_path->GetEntryLabel());
    } else {
      // Check that length(input) >= pos.
      __ movl(input_len, Address(input, length_offset));
      __ cmpl(input_len, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      __ leal(temp, Address(input_len, -pos_const));
      __ cmpl(temp, length);
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    __ cmpl(temp, length);
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, srcPos, src, count, slow_path, src_base, dest_base);

  // Validity checks: dest.
  CheckPosition(assembler, destPos, dest, count, slow_path, src_base, dest_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();

    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
  __ rep_movsw();

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs in the runtime calling convention registers; the result is returned in EAX.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ testl(argument, argument);
  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pStringCompareTo)));
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
  locations->AddTemp(Location::RegisterLocation(ECX));
  locations->AddTemp(Location::RegisterLocation(EDI));

  // Set output, ESI needed for repe_cmpsl instruction anyways.
  locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register ecx = locations->GetTemp(0).AsRegister<Register>();
  Register edi = locations->GetTemp(1).AsRegister<Register>();
  Register esi = locations->Out().AsRegister<Register>();

  NearLabel end, return_true, return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ testl(arg, arg);
  __ j(kEqual, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ movl(ecx, Address(str, class_offset));
  __ cmpl(ecx, Address(arg, class_offset));
  __ j(kNotEqual, &return_false);

  // Reference equality check, return true if same reference.
  __ cmpl(str, arg);
  __ j(kEqual, &return_true);

  // Load length of receiver string.
  __ movl(ecx, Address(str, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmpl(ecx, Address(arg, count_offset));
  __ j(kNotEqual, &return_false);
  // Return true if both strings are empty.
  __ jecxz(&return_true);

  // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
  __ leal(esi, Address(str, value_offset));
  __ leal(edi, Address(arg, value_offset));

  // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
  __ addl(ecx, Immediate(1));
  __ shrl(ecx, Immediate(1));

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
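  // With that zero padding, an odd length is safe: the final 4-byte
  // repe_cmpsl step compares one real char plus one zero pad char from each
  // string, so equal strings still compare equal. E.g. length 5 gives
  // (5 + 1) / 2 = 3 dword comparisons covering chars 0..5.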

  // Loop to compare strings two characters at a time starting at the beginning of the string.
  __ repe_cmpsl();
  // If strings are not equal, zero flag will be cleared.
  __ j(kNotEqual, &return_false);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ movl(esi, Immediate(1));
  __ jmp(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ xorl(esi, esi);
  __ Bind(&end);
}

static void CreateStringIndexOfLocations(HInvoke* invoke,
                                         ArenaAllocator* allocator,
                                         bool start_at_zero) {
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // The data needs to be in EDI for scasw. So request that the string is there, anyways.
  locations->SetInAt(0, Location::RegisterLocation(EDI));
  // If we look for a constant char, we'll still have to copy it into EAX. So just request the
  // allocator to do that, anyways. We can still do the constant check by checking the parameter
  // of the instruction explicitly.
  // Note: This works as we don't clobber EAX anywhere.
  locations->SetInAt(1, Location::RegisterLocation(EAX));
  if (!start_at_zero) {
    locations->SetInAt(2, Location::RequiresRegister());  // The starting index.
  }
  // As we clobber EDI during execution anyways, also use it as the output.
  locations->SetOut(Location::SameAsFirstInput());

  // repne scasw uses ECX as the counter.
  locations->AddTemp(Location::RegisterLocation(ECX));
  // Need another temporary to be able to compute the result.
  locations->AddTemp(Location::RequiresRegister());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  X86Assembler* assembler,
                                  CodeGeneratorX86* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register string_obj = locations->InAt(0).AsRegister<Register>();
  Register search_value = locations->InAt(1).AsRegister<Register>();
  Register counter = locations->GetTemp(0).AsRegister<Register>();
  Register string_length = locations->GetTemp(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // Check our assumptions for registers.
  DCHECK_EQ(string_obj, EDI);
  DCHECK_EQ(search_value, EAX);
  DCHECK_EQ(counter, ECX);
  DCHECK_EQ(out, EDI);

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeX86* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
      codegen->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
    slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
    codegen->AddSlowPath(slow_path);
    __ j(kAbove, slow_path->GetEntryLabel());
  }

  // From here down, we know that we are looking for a char that fits in 16 bits.
  // Location of reference to data array within the String object.
  int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count within the String object.
  int32_t count_offset = mirror::String::CountOffset().Int32Value();

  // Load string length, i.e., the count field of the string.
  __ movl(string_length, Address(string_obj, count_offset));

  // Do a zero-length check.
  // TODO: Support jecxz.
  NearLabel not_found_label;
  __ testl(string_length, string_length);
  __ j(kEqual, &not_found_label);

  if (start_at_zero) {
    // Number of chars to scan is the same as the string length.
    __ movl(counter, string_length);

    // Move to the start of the string.
    __ addl(string_obj, Immediate(value_offset));
  } else {
    Register start_index = locations->InAt(2).AsRegister<Register>();

    // Do a start_index check.
    __ cmpl(start_index, string_length);
    __ j(kGreaterEqual, &not_found_label);

    // Ensure we have a start index >= 0.
    __ xorl(counter, counter);
    __ cmpl(start_index, Immediate(0));
    __ cmovl(kGreater, counter, start_index);

    // Move to the start of the string: string_obj + value_offset + 2 * start_index.
    __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));

    // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
    // compare.
    __ negl(counter);
    __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
  }

  // Everything is set up for repne scasw:
  //   * Comparison address in EDI.
  //   * Counter in ECX.
  __ repne_scasw();

  // Did we find a match?
  __ j(kNotEqual, &not_found_label);

  // Yes, we matched. Compute the index of the result.
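  // repne scasw decremented ECX once per scanned char, including the match,
  // so (string_length - counter) is the one-based position of the hit and
  // subtracting one yields the index. E.g. length 8, match at index 2:
  // counter ends at 5, and 8 - 5 - 1 = 2.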
1332 __ subl(string_length, counter);
1333 __ leal(out, Address(string_length, -1));
1334
Mark Mendell0c9497d2015-08-21 09:30:05 -04001335 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001336 __ jmp(&done);
1337
1338 // Failed to match; return -1.
1339 __ Bind(&not_found_label);
1340 __ movl(out, Immediate(-1));
1341
1342 // And join up at the end.
1343 __ Bind(&done);
1344 if (slow_path != nullptr) {
1345 __ Bind(slow_path->GetExitLabel());
1346 }
1347}
1348
1349void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
1350 CreateStringIndexOfLocations(invoke, arena_, true);
1351}
1352
1353void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
1354 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
1355}
1356
1357void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
1358 CreateStringIndexOfLocations(invoke, arena_, false);
1359}
1360
1361void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
1362 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
1363}
1364
Jeff Hao848f70a2014-01-15 13:49:50 -08001365void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1366 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1367 LocationSummary::kCall,
1368 kIntrinsified);
1369 InvokeRuntimeCallingConvention calling_convention;
1370 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1371 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1372 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1373 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1374 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001375}
1376
1377void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1378 X86Assembler* assembler = GetAssembler();
1379 LocationSummary* locations = invoke->GetLocations();
1380
1381 Register byte_array = locations->InAt(0).AsRegister<Register>();
1382 __ testl(byte_array, byte_array);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001383 SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001384 codegen_->AddSlowPath(slow_path);
1385 __ j(kEqual, slow_path->GetEntryLabel());
1386
1387 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromBytes)));
1388 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1389 __ Bind(slow_path->GetExitLabel());
1390}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromChars)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ testl(string_to_copy, string_to_copy);
  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromString)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}
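
// The three StringFactory intrinsics above share one shape: when the source
// may be null (bytes, string), a testl/je pair branches to an out-of-line
// IntrinsicSlowPathX86 that simply re-runs the original invoke, and the fast
// path calls the matching quick entrypoint through the thread-local
// entrypoint table in the FS segment, returning the new string in EAX.
// Roughly, for the fast path (register and label names illustrative only):
//
//   testl %src, %src                    // null check the source object
//   je    slow_path_entry               // fall back to the original call
//   call  %fs:[pAllocStringFromString]  // runtime allocates; result in EAX
// slow_path_exit: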

static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location out_loc = locations->Out();
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimShort:
      __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimInt:
      __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimLong:
      __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
      __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
      break;
    default:
      LOG(FATAL) << "Type not recognized for peek: " << size;
      UNREACHABLE();
  }
}
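
// Memory.peek* receives a raw native address as a Java long; on 32-bit x86
// only the low word of the register pair is used, and byte/short results are
// sign-extended (movsxb/movsxw) to match Java widening. A C++ sketch of the
// semantics (function names here are illustrative, not libcore's):
//
//   int8_t peek_byte(uintptr_t addr) {
//     return *reinterpret_cast<const int8_t*>(addr);  // sign-extends on use
//   }
//   int64_t peek_long(uintptr_t addr) {
//     // Emitted above as two 32-bit loads, so the read is not atomic.
//     return *reinterpret_cast<const int64_t*>(addr);
//   }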

void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
                                         HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  HInstruction* value = invoke->InputAt(1);
  if (size == Primitive::kPrimByte) {
    locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(value));
  }
}

static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(1);
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      if (value_loc.IsConstant()) {
        __ movb(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
      }
      break;
    case Primitive::kPrimShort:
      if (value_loc.IsConstant()) {
        __ movw(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movw(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimInt:
      if (value_loc.IsConstant()) {
        __ movl(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimLong:
      if (value_loc.IsConstant()) {
        int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
        __ movl(Address(address, 0), Immediate(Low32Bits(value)));
        __ movl(Address(address, 4), Immediate(High32Bits(value)));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
        __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
      }
      break;
    default:
      LOG(FATAL) << "Type not recognized for poke: " << size;
      UNREACHABLE();
  }
}
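
// Note that the long poke is emitted as two independent 32-bit stores, so,
// unlike the volatile Unsafe paths below, it is not atomic with respect to
// concurrent readers. In C++ terms (a sketch under that assumption):
//
//   void poke_long(uintptr_t addr, int64_t value) {
//     *reinterpret_cast<uint32_t*>(addr) = static_cast<uint32_t>(value);  // Low32Bits
//     *reinterpret_cast<uint32_t*>(addr + 4) =
//         static_cast<uint32_t>(static_cast<uint64_t>(value) >> 32);      // High32Bits
//   }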

void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
  GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86WordSize>()));
}
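
// Thread.currentThread() reduces to a single segment-relative load: on x86,
// ART reaches thread-local state through the FS segment, and PeerOffset is
// the offset of the java.lang.Thread peer object within it. The emitted code
// is roughly:
//
//   movl %fs:[peer_offset], %out   // load the managed Thread object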

static void GenUnsafeGet(LocationSummary* locations, Primitive::Type type,
                         bool is_volatile, X86Assembler* assembler) {
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location output = locations->Out();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register output_reg = output.AsRegister<Register>();
      __ movl(output_reg, Address(base, offset, ScaleFactor::TIMES_1, 0));
      if (type == Primitive::kPrimNot) {
        __ MaybeUnpoisonHeapReference(output_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
        Register output_lo = output.AsRegisterPairLow<Register>();
        Register output_hi = output.AsRegisterPairHigh<Register>();
        if (is_volatile) {
          // Need to use an XMM register to read the long atomically.
          XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
          __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
          __ movd(output_lo, temp);
          __ psrlq(temp, Immediate(32));
          __ movd(output_hi, temp);
        } else {
          __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
          __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
        }
      }
      break;

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
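
// On 32-bit x86 a plain long load is two movl instructions and can tear if
// another thread writes the field concurrently. The volatile path instead
// issues a single 8-byte movsd into an XMM temp and splits it with
// movd/psrlq/movd. A rough C++ analogue of the guarantee being provided
// (an illustration, not code the compiler calls):
//
//   #include <atomic>
//   int64_t unsafe_get_long_volatile(std::atomic<int64_t>* field) {
//     return field->load();  // one untorn 64-bit read
//   }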

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke,
                                          bool is_long, bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  if (is_long) {
    if (is_volatile) {
      // Need an XMM register to read the volatile long atomically.
      locations->AddTemp(Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
    } else {
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
    }
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, true, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, true, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, false, true);
}


void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, true, GetAssembler());
}


static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
                                                       Primitive::Type type,
                                                       HInvoke* invoke,
                                                       bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Ensure the value is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  } else if (type == Primitive::kPrimLong && is_volatile) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, true);
}

// We don't need anything special for ordered writes: they require an AnyStore
// barrier, which the x86 memory model already provides.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(3);

  if (type == Primitive::kPrimLong) {
    Register value_lo = value_loc.AsRegisterPairLow<Register>();
    Register value_hi = value_loc.AsRegisterPairHigh<Register>();
    if (is_volatile) {
      XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
      __ movd(temp1, value_lo);
      __ movd(temp2, value_hi);
      __ punpckldq(temp1, temp2);
      __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
    } else {
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
    }
  } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    __ movl(temp, value_loc.AsRegister<Register>());
    __ PoisonHeapReference(temp);
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
  } else {
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
  }

  if (is_volatile) {
    __ mfence();
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value_loc.AsRegister<Register>(),
                        value_can_be_null);
  }
}
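
// For volatile stores, the trailing mfence supplies the StoreLoad barrier the
// Java memory model requires; plain and ordered stores need nothing extra
// since x86's TSO ordering already provides the AnyStore barrier. The volatile
// long path assembles the value in an XMM register so the store itself is one
// untorn 8-byte write:
//
//   movd      %xmm0, %value_lo        // xmm0 = 0:lo
//   movd      %xmm1, %value_hi        // xmm1 = 0:hi
//   punpckldq %xmm0, %xmm1            // xmm0 low quadword = hi:lo
//   movsd     [base + offset], %xmm0  // single 8-byte store
//   mfence                            // StoreLoad barrier (volatile only)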

void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
                                       HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  // Offset is a long, but in 32-bit mode we only need the low word.
  // Can we update the invoke here to remove a TypeConvert to Long?
  locations->SetInAt(2, Location::RequiresRegister());
  // Expected value must be in EAX or EDX:EAX.
  // For long, new value must be in ECX:EBX.
  if (type == Primitive::kPrimLong) {
    locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
    locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
  } else {
    locations->SetInAt(3, Location::RegisterLocation(EAX));
    locations->SetInAt(4, Location::RequiresRegister());
  }

  // Force a byte register for the output.
  locations->SetOut(Location::RegisterLocation(EAX));
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());
    // Need a byte register for marking.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
}

static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location out = locations->Out();
  DCHECK_EQ(out.AsRegister<Register>(), EAX);

  if (type == Primitive::kPrimLong) {
    DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
    DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
    DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
    DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
    __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
  } else {
    // Integer or object.
    Register expected = locations->InAt(3).AsRegister<Register>();
    DCHECK_EQ(expected, EAX);
    Register value = locations->InAt(4).AsRegister<Register>();
    if (type == Primitive::kPrimNot) {
      // Mark card for object assuming new value is stored.
      bool value_can_be_null = true;  // TODO: Worth finding out this information?
      codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                          locations->GetTemp(1).AsRegister<Register>(),
                          base,
                          value,
                          value_can_be_null);

      if (kPoisonHeapReferences) {
        __ PoisonHeapReference(expected);
        __ PoisonHeapReference(value);
      }
    }

    __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
  }

  // Locked cmpxchg has full barrier semantics, so no extra scheduling
  // barriers are needed here.

  // Convert ZF into the boolean result.
  __ setb(kZero, out.AsRegister<Register>());
  __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());

  if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register value = locations->InAt(4).AsRegister<Register>();
    __ UnpoisonHeapReference(value);
    // Do not unpoison the reference contained in register `expected`,
    // as it is the same as register `out`.
  }
}
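
// lock cmpxchg maps directly onto the compare-and-swap contract: EAX (EDX:EAX
// for cmpxchg8b) carries the expected value, and ZF reports success, which
// setb/movzxb then widen into the boolean result. A C++ sketch of the
// semantics (not ART's actual helper):
//
//   #include <atomic>
//   bool unsafe_cas_int(std::atomic<int32_t>* field,
//                       int32_t expected, int32_t new_value) {
//     // Strong CAS: succeeds iff *field == expected; full barrier on x86.
//     return field->compare_exchange_strong(expected, new_value);
//   }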

void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCAS(Primitive::kPrimInt, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCAS(Primitive::kPrimLong, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCAS(Primitive::kPrimNot, invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
                     X86Assembler* assembler) {
  Immediate imm_shift(shift);
  Immediate imm_mask(mask);
  __ movl(temp, reg);
  __ shrl(reg, imm_shift);
  __ andl(temp, imm_mask);
  __ andl(reg, imm_mask);
  __ shll(temp, imm_shift);
  __ orl(reg, temp);
}
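
// SwapBits performs, in place, one round of the classic bit-reversal identity
// for a given (shift, mask) pair:
//
//   reg = ((reg >> shift) & mask) | ((reg & mask) << shift)
//
// e.g. shift 1 with mask 0x55555555 swaps every adjacent pair of bits.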

void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  /*
   * Use one bswap instruction to reverse byte order first and then use 3 rounds of
   * swapping bits to reverse bits in a number x. Using bswap saves instructions
   * compared to the generic luni implementation, which needs 5 rounds of swapping bits.
   * x = bswap x
   * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
   * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
   * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
   */
  __ bswapl(reg);
  SwapBits(reg, temp, 1, 0x55555555, assembler);
  SwapBits(reg, temp, 2, 0x33333333, assembler);
  SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
}

void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
  X86Assembler* assembler =
      reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // We want to exchange the high and low words and then bit-reverse each word
  // exactly as in the 32-bit case.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // Bit-reverse the low word.
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // Bit-reverse the high word.
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}
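
// The 64-bit reverse relies on the identity reverse64(hi:lo) ==
// reverse32(lo):reverse32(hi), hence the word exchange up front. As a C++
// sketch (with reverse32 standing in for the bswap-plus-SwapBits sequence
// above):
//
//   uint64_t reverse64(uint64_t x) {
//     return (static_cast<uint64_t>(reverse32(static_cast<uint32_t>(x))) << 32) |
//            reverse32(static_cast<uint32_t>(x >> 32));
//   }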

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSR sets ZF if the input was zero; in that case the output is undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register pair.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // High word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // High word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All-zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
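
// For nonzero x, BSR yields the index of the highest set bit, so
// clz32(x) == 31 - bsr(x); because bsr(x) lies in [0, 31], the subtraction can
// be done with `xorl out, 31`. In the high-word-zero path, `xorl out, 63`
// likewise computes 63 - bsr(lo) == 32 + clz32(lo). Worked example:
//
//   x = 0x00000100: bsr(x) = 8, and 8 ^ 31 == 23 == clz32(0x100).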

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ true);
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name) \
void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
} \
void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(IntegerNumberOfTrailingZeros)
UNIMPLEMENTED_INTRINSIC(LongNumberOfTrailingZeros)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace x86
}  // namespace art