/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int kFloatNaN = 0x7FC00000;

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
}

X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  return res != nullptr && res->Intrinsified();
}

#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->

// TODO: target as memory.
static void MoveFromReturnRegister(Location target,
                                   Primitive::Type type,
                                   CodeGeneratorX86* codegen) {
  if (!target.IsValid()) {
    DCHECK(type == Primitive::kPrimVoid);
    return;
  }

  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register target_reg = target.AsRegister<Register>();
      if (target_reg != EAX) {
        __ movl(target_reg, EAX);
      }
      break;
    }
    case Primitive::kPrimLong: {
      Register target_reg_lo = target.AsRegisterPairLow<Register>();
      Register target_reg_hi = target.AsRegisterPairHigh<Register>();
      if (target_reg_lo != EAX) {
        __ movl(target_reg_lo, EAX);
      }
      if (target_reg_hi != EDX) {
        __ movl(target_reg_hi, EDX);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected void type for valid location " << target;
      UNREACHABLE();

    case Primitive::kPrimDouble: {
      XmmRegister target_reg = target.AsFpuRegister<XmmRegister>();
      if (target_reg != XMM0) {
        __ movsd(target_reg, XMM0);
      }
      break;
    }
    case Primitive::kPrimFloat: {
      XmmRegister target_reg = target.AsFpuRegister<XmmRegister>();
      if (target_reg != XMM0) {
        __ movss(target_reg, XMM0);
      }
      break;
    }
  }
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

// Slow-path for fallback (calling the managed code to handle the intrinsic) in an intrinsified
// call. This will copy the arguments into the positions for a regular call.
//
// Note: The actual parameters are required to be in the locations given by the invoke's location
//       summary. If an intrinsic modifies those locations before a slowpath call, they must be
//       restored!
class IntrinsicSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit IntrinsicSlowPathX86(HInvoke* invoke)
      : invoke_(invoke) { }

  void EmitNativeCode(CodeGenerator* codegen_in) OVERRIDE {
    CodeGeneratorX86* codegen = down_cast<CodeGeneratorX86*>(codegen_in);
    __ Bind(GetEntryLabel());

    SaveLiveRegisters(codegen, invoke_->GetLocations());

    MoveArguments(invoke_, codegen);

    if (invoke_->IsInvokeStaticOrDirect()) {
      codegen->GenerateStaticOrDirectCall(invoke_->AsInvokeStaticOrDirect(),
                                          Location::RegisterLocation(EAX));
      RecordPcInfo(codegen, invoke_, invoke_->GetDexPc());
    } else {
      UNIMPLEMENTED(FATAL) << "Non-direct intrinsic slow-path not yet implemented";
      UNREACHABLE();
    }

    // Copy the result back to the expected output.
    Location out = invoke_->GetLocations()->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister());  // TODO: Replace this when we support output in memory.
      DCHECK(!invoke_->GetLocations()->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      MoveFromReturnRegister(out, invoke_->GetType(), codegen);
    }

    RestoreLiveRegisters(codegen, invoke_->GetLocations());
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "IntrinsicSlowPathX86"; }

 private:
  // The instruction where this slow path is happening.
  HInvoke* const invoke_;

  DISALLOW_COPY_AND_ASSIGN(IntrinsicSlowPathX86);
};

#undef __
#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

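// A rough sketch (illustration only, hypothetical variable names) of the
// 64-bit split/join the two helpers above emit on little-endian x86:
//
//   uint32_t lo = static_cast<uint32_t>(bits);        // movd (low half)
//   uint32_t hi = static_cast<uint32_t>(bits >> 32);  // psrlq $32, then movd
//   uint64_t joined = (static_cast<uint64_t>(hi) << 32) | lo;  // punpckldq + movsd
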
void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

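// Sketch of the kPrimShort path above (illustration only, hypothetical
// helper): byte-swapping the full 32-bit register and then shifting right
// arithmetically by 16 both swaps the two low bytes and sign-extends the
// 16-bit result:
//
//   int32_t ReverseShortBytes(int32_t x) {  // x holds a short
//     return static_cast<int32_t>(__builtin_bswap32(x)) >> 16;  // bswapl + sarl
//   }
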
void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

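// Equivalent sketch (illustration only): a 64-bit byte reversal on 32-bit
// x86 is two 32-bit byte swaps with the halves exchanged:
//
//   out_lo = __builtin_bswap32(in_hi);
//   out_hi = __builtin_bswap32(in_lo);
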
void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
//       need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // TODO: Allow x86 to work with memory. This requires assembler support, see below.
  // locations->SetInAt(0, Location::Any());  // X86 can work on memory directly.
  locations->SetOut(Location::SameAsFirstInput());
}

static void MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location output = locations->Out();

  if (output.IsFpuRegister()) {
    // Create the right constant on an aligned stack.
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  } else {
    // TODO: update when assembler support is available.
    UNIMPLEMENTED(FATAL) << "Needs assembler support.";
//  Once assembler support is available, in-memory operations look like this:
//  if (is64bit) {
//    DCHECK(output.IsDoubleStackSlot());
//    __ andl(Address(Register(RSP), output.GetHighStackIndex(kX86WordSize)),
//            Immediate(0x7FFFFFFF));
//  } else {
//    DCHECK(output.IsStackSlot());
//    // Can use and with a literal directly.
//    __ andl(Address(Register(RSP), output.GetStackIndex()), Immediate(0x7FFFFFFF));
//  }
  }
}

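// The pushed constants build IEEE-754 sign-clearing masks on the stack:
// 0x7FFFFFFFFFFFFFFF for doubles, 0x7FFFFFFF for floats. Bit-level sketch
// (illustration only):
//
//   uint64_t bits;
//   memcpy(&bits, &d, sizeof(bits));
//   bits &= 0x7FFFFFFFFFFFFFFFULL;  // andpd: clear only the sign bit
//   memcpy(&d, &bits, sizeof(bits));
//
// Note both paths move ESP by 16 bytes in total, matching the "aligned
// stack" comment above so the andpd/andps memory operand stays aligned.
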
void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

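// Branchless abs sketch (illustration only), assuming two's complement:
//
//   int32_t sign = x >> 31;           // cdq: 0 if x >= 0, -1 if x < 0
//   int32_t abs = (x ^ sign) - sign;  // xorl + subl
//
// e.g. x = -5: sign = -1, (-5 ^ -1) = 4, and 4 - (-1) = 5.
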
static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

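// The same sign trick widened to 64 bits (illustration only): with
// sign = hi >> 31 replicated into both words, |x| = (x ^ sign) - sign,
// where the final subtraction is subl/sbbl so the borrow out of the low
// word propagates into the high word.
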
void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double,
                        X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if NaN jmp NaN_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  //  NaN_label:
  //  out := NaN
  //  op2_label:
  //  out := op2
  //  done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (is_double) {
    __ pushl(Immediate(kDoubleNaNHigh));
    __ pushl(Immediate(kDoubleNaNLow));
    __ movsd(out, Address(ESP, 0));
    __ addl(ESP, Immediate(8));
  } else {
    __ pushl(Immediate(kFloatNaN));
    __ movss(out, Address(ESP, 0));
    __ addl(ESP, Immediate(4));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

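// Why or/and handles signed zeros above (illustration only): -0.0 and +0.0
// compare equal but differ solely in the sign bit, so when neither jump is
// taken the operands are equal zeros and
//   min: orpd/orps sets the sign bit if either operand is -0.0, so -0.0 wins;
//   max: andpd/andps clears it unless both operands are -0.0, so +0.0 wins.
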
static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    //       a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_lo.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    //  (out := op1)
    //  out <=? op2
    //  if out is min jmp done
    //  out := op2
    //  done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

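// Sketch of the 64-bit selection above (illustration only): out - op2 is
// evaluated as subl (low word) then sbbl (high word) purely to set the
// flags, after which two cmovl instructions pick either register pair
// without a branch:
//
//   if (static_cast<int64_t>(out) >= static_cast<int64_t>(op2)) out = op2;  // is_min case
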
static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    MoveFromReturnRegister(out, invoke->GetType(), codegen);
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}

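// The roundsd immediate encodes the SSE4.1 rounding mode: 0 = round to
// nearest even (rint), 1 = round toward negative infinity (floor),
// 2 = round toward positive infinity (ceil), which is why the three
// visitors below pass 0, 1, and 2.
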
void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

// Note that 32-bit x86 doesn't have the capability to inline MathRoundDouble,
// as it needs 64-bit instructions.
void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  // Implement RoundFloat as t1 = floor(input + 0.5f); convert to int.
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  XmmRegister maxInt = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister inPlusPointFive = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  NearLabel done, nan;
  X86Assembler* assembler = GetAssembler();

  // Generate 0.5 into inPlusPointFive.
  __ movl(out, Immediate(bit_cast<int32_t, float>(0.5f)));
  __ movd(inPlusPointFive, out);

  // Add in the input.
  __ addss(inPlusPointFive, in);

  // And round down (floor) to an integral value.
  __ roundss(inPlusPointFive, inPlusPointFive, Immediate(1));

  __ movl(out, Immediate(kPrimIntMax));
  // maxInt = int-to-float(out)
  __ cvtsi2ss(maxInt, out);

  // if inPlusPointFive >= maxInt goto done
  __ comiss(inPlusPointFive, maxInt);
  __ j(kAboveEqual, &done);

  // if input is NaN goto nan
  __ j(kUnordered, &nan);

  // output = float-to-int-truncate(input)
  __ cvttss2si(out, inPlusPointFive);
  __ jmp(&done);
  __ Bind(&nan);

  // output = 0
  __ xorl(out, out);
  __ Bind(&done);
}

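// Net effect of the sequence above, as a C sketch (illustration only):
//
//   float t = floorf(in + 0.5f);                       // addss + roundss(1)
//   if (t >= (float)kPrimIntMax) return kPrimIntMax;   // comiss + jae
//   if (isnan(t)) return 0;                            // unordered compare
//   return (int32_t)t;                                 // cvttss2si
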
void IntrinsicLocationsBuilderX86::VisitStringCharAt(HInvoke* invoke) {
  // The inputs: the string object and the index.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  X86Assembler* assembler = GetAssembler();

  __ cmpl(idx, Address(obj, count_offset));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ j(kAboveEqual, slow_path->GetEntryLabel());

  // out = out[2*idx].
  __ movzxw(out, Address(out, idx, ScaleFactor::TIMES_2, value_offset));

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs, passed in the runtime calling convention.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ testl(argument, argument);
  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pStringCompareTo)));
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
  locations->AddTemp(Location::RegisterLocation(ECX));
  locations->AddTemp(Location::RegisterLocation(EDI));

  // Set output, ESI needed for repe_cmpsl instruction anyways.
  locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
}

void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register ecx = locations->GetTemp(0).AsRegister<Register>();
  Register edi = locations->GetTemp(1).AsRegister<Register>();
  Register esi = locations->Out().AsRegister<Register>();

  NearLabel end, return_true, return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  // Check if input is null, return false if it is.
  __ testl(arg, arg);
  __ j(kEqual, &return_false);

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  __ movl(ecx, Address(str, class_offset));
  __ cmpl(ecx, Address(arg, class_offset));
  __ j(kNotEqual, &return_false);

  // Reference equality check, return true if same reference.
  __ cmpl(str, arg);
  __ j(kEqual, &return_true);

  // Load length of receiver string.
  __ movl(ecx, Address(str, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmpl(ecx, Address(arg, count_offset));
  __ j(kNotEqual, &return_false);
  // Return true if both strings are empty.
  __ jecxz(&return_true);

  // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
  __ leal(esi, Address(str, value_offset));
  __ leal(edi, Address(arg, value_offset));

  // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
  __ addl(ecx, Immediate(1));
  __ shrl(ecx, Immediate(1));

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings two characters at a time starting at the beginning of the string.
  __ repe_cmpsl();
  // If strings are not equal, zero flag will be cleared.
  __ j(kNotEqual, &return_false);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ movl(esi, Immediate(1));
  __ jmp(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ xorl(esi, esi);
  __ Bind(&end);
}

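// Compare-loop sketch (illustration only): with 4-byte-aligned, zero-padded
// UTF-16 data, (length + 1) / 2 repe_cmpsl iterations compare two characters
// per step, and an odd trailing character is covered by the zero padding:
//
//   size_t words = (length + 1) / 2;                // addl $1 + shrl $1
//   bool equal = memcmp(lhs, rhs, words * 4) == 0;  // repe cmpsl
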
static void CreateStringIndexOfLocations(HInvoke* invoke,
                                         ArenaAllocator* allocator,
                                         bool start_at_zero) {
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // The data needs to be in EDI for scasw. So request that the string is there, anyways.
  locations->SetInAt(0, Location::RegisterLocation(EDI));
  // If we look for a constant char, we'll still have to copy it into EAX. So just request the
  // allocator to do that, anyways. We can still do the constant check by checking the parameter
  // of the instruction explicitly.
  // Note: This works as we don't clobber EAX anywhere.
  locations->SetInAt(1, Location::RegisterLocation(EAX));
  if (!start_at_zero) {
    locations->SetInAt(2, Location::RequiresRegister());  // The starting index.
  }
  // As we clobber EDI during execution anyways, also use it as the output.
  locations->SetOut(Location::SameAsFirstInput());

  // repne scasw uses ECX as the counter.
  locations->AddTemp(Location::RegisterLocation(ECX));
  // Need another temporary to be able to compute the result.
  locations->AddTemp(Location::RequiresRegister());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  X86Assembler* assembler,
                                  CodeGeneratorX86* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register string_obj = locations->InAt(0).AsRegister<Register>();
  Register search_value = locations->InAt(1).AsRegister<Register>();
  Register counter = locations->GetTemp(0).AsRegister<Register>();
  Register string_length = locations->GetTemp(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // Check our assumptions for registers.
  DCHECK_EQ(string_obj, EDI);
  DCHECK_EQ(search_value, EAX);
  DCHECK_EQ(counter, ECX);
  DCHECK_EQ(out, EDI);

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCodeX86* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
      codegen->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
    slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
    codegen->AddSlowPath(slow_path);
    __ j(kAbove, slow_path->GetEntryLabel());
  }

  // From here down, we know that we are looking for a char that fits in 16 bits.
  // Location of reference to data array within the String object.
  int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count within the String object.
  int32_t count_offset = mirror::String::CountOffset().Int32Value();

  // Load string length, i.e., the count field of the string.
  __ movl(string_length, Address(string_obj, count_offset));

  // Do a zero-length check.
  // TODO: Support jecxz.
  NearLabel not_found_label;
  __ testl(string_length, string_length);
  __ j(kEqual, &not_found_label);

  if (start_at_zero) {
    // Number of chars to scan is the same as the string length.
    __ movl(counter, string_length);

    // Move to the start of the string.
    __ addl(string_obj, Immediate(value_offset));
  } else {
    Register start_index = locations->InAt(2).AsRegister<Register>();

    // Do a start_index check.
    __ cmpl(start_index, string_length);
    __ j(kGreaterEqual, &not_found_label);

    // Ensure we have a start index >= 0.
    __ xorl(counter, counter);
    __ cmpl(start_index, Immediate(0));
    __ cmovl(kGreater, counter, start_index);

    // Move to the start of the string: string_obj + value_offset + 2 * start_index.
    __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));

    // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
    // compare.
    __ negl(counter);
    __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
  }

  // Everything is set up for repne scasw:
  //   * Comparison address in EDI.
  //   * Counter in ECX.
  __ repne_scasw();

  // Did we find a match?
  __ j(kNotEqual, &not_found_label);

  // Yes, we matched. Compute the index of the result.
  __ subl(string_length, counter);
  __ leal(out, Address(string_length, -1));

  NearLabel done;
  __ jmp(&done);

  // Failed to match; return -1.
  __ Bind(&not_found_label);
  __ movl(out, Immediate(-1));

  // And join up at the end.
  __ Bind(&done);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

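// Index recovery sketch (illustration only): repne scasw decrements ECX once
// per character scanned, including the match itself, and ECX starts relative
// to string_length, so after the scan
//
//   index = (string_length - counter) - 1;   // subl + leal(-1)
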
void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, true);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, false);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ testl(byte_array, byte_array);
  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromBytes)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromChars)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ testl(string_to_copy, string_to_copy);
  SlowPathCodeX86* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromString)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location out_loc = locations->Out();
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimShort:
      __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimInt:
      __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimLong:
      __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
      __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
      break;
    default:
      LOG(FATAL) << "Type not recognized for peek: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
                                         HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  HInstruction* value = invoke->InputAt(1);
  if (size == Primitive::kPrimByte) {
    locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(value));
  }
}

static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(1);
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      if (value_loc.IsConstant()) {
        __ movb(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
      }
      break;
    case Primitive::kPrimShort:
      if (value_loc.IsConstant()) {
        __ movw(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movw(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimInt:
      if (value_loc.IsConstant()) {
        __ movl(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimLong:
      if (value_loc.IsConstant()) {
        int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
        __ movl(Address(address, 0), Immediate(Low32Bits(value)));
        __ movl(Address(address, 4), Immediate(High32Bits(value)));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
        __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
      }
      break;
    default:
      LOG(FATAL) << "Type not recognized for poke: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
  GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86WordSize>()));
}

1419static void GenUnsafeGet(LocationSummary* locations, Primitive::Type type,
1420 bool is_volatile, X86Assembler* assembler) {
1421 Register base = locations->InAt(1).AsRegister<Register>();
1422 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
1423 Location output = locations->Out();
1424
1425 switch (type) {
1426 case Primitive::kPrimInt:
1427 case Primitive::kPrimNot: {
1428 Register output_reg = output.AsRegister<Register>();
1429 __ movl(output_reg, Address(base, offset, ScaleFactor::TIMES_1, 0));
1430 if (type == Primitive::kPrimNot) {
1431 __ MaybeUnpoisonHeapReference(output_reg);
1432 }
1433 break;
1434 }
1435
1436 case Primitive::kPrimLong: {
1437 Register output_lo = output.AsRegisterPairLow<Register>();
1438 Register output_hi = output.AsRegisterPairHigh<Register>();
1439 if (is_volatile) {
1440 // Need to use an XMM register to read atomically.
1441 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1442 __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
1443 __ movd(output_lo, temp);
1444 __ psrlq(temp, Immediate(32));
1445 __ movd(output_hi, temp);
1446 } else {
1447 __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
1448 __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
1449 }
1450 }
1451 break;
1452
1453 default:
1454 LOG(FATAL) << "Unsupported op size " << type;
1455 UNREACHABLE();
1456 }
1457}
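// The volatile long path above is the usual SSE2 idiom for an atomic 64-bit
// load on 32-bit x86 (sketch of the emitted sequence):
//   movsd xmm_temp, [base + offset]   // one 8-byte load, atomic when aligned
//   movd  output_lo, xmm_temp         // extract the low 32 bits
//   psrlq xmm_temp, 32                // shift the high half down
//   movd  output_hi, xmm_temp         // extract the high 32 bits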
1458
1459static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke,
1460 bool is_long, bool is_volatile) {
1461 LocationSummary* locations = new (arena) LocationSummary(invoke,
1462 LocationSummary::kNoCall,
1463 kIntrinsified);
1464 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1465 locations->SetInAt(1, Location::RequiresRegister());
1466 locations->SetInAt(2, Location::RequiresRegister());
1467 if (is_long) {
1468 if (is_volatile) {
1469 // Need an XMM temporary to read the volatile long atomically.
1470 locations->AddTemp(Location::RequiresFpuRegister());
1471 locations->SetOut(Location::RequiresRegister());
1472 } else {
1473 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1474 }
1475 } else {
1476 locations->SetOut(Location::RequiresRegister());
1477 }
1478}
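// Note on kOutputOverlap above: the non-volatile long read is emitted as two
// separate movl loads, so the output pair must not share registers with the
// base or offset inputs, or the first load could clobber an input the second
// load still needs.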
1479
1480void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
1481 CreateIntIntIntToIntLocations(arena_, invoke, false, false);
1482}
1483void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
1484 CreateIntIntIntToIntLocations(arena_, invoke, false, true);
1485}
1486void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
1487 CreateIntIntIntToIntLocations(arena_, invoke, true, false);
1488}
1489void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
1490 CreateIntIntIntToIntLocations(arena_, invoke, true, true);
1491}
1492void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
1493 CreateIntIntIntToIntLocations(arena_, invoke, false, false);
1494}
1495void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
1496 CreateIntIntIntToIntLocations(arena_, invoke, false, true);
1497}
1498
1499
1500void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
1501 GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, false, GetAssembler());
1502}
1503void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
1504 GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimInt, true, GetAssembler());
1505}
1506void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
1507 GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, false, GetAssembler());
1508}
1509void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
1510 GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimLong, true, GetAssembler());
1511}
1512void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
1513 GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, false, GetAssembler());
1514}
1515void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
1516 GenUnsafeGet(invoke->GetLocations(), Primitive::kPrimNot, true, GetAssembler());
1517}
1518
1519
1520static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
1521 Primitive::Type type,
1522 HInvoke* invoke,
1523 bool is_volatile) {
1524 LocationSummary* locations = new (arena) LocationSummary(invoke,
1525 LocationSummary::kNoCall,
1526 kIntrinsified);
1527 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1528 locations->SetInAt(1, Location::RequiresRegister());
1529 locations->SetInAt(2, Location::RequiresRegister());
1530 locations->SetInAt(3, Location::RequiresRegister());
1531 if (type == Primitive::kPrimNot) {
1532 // Need temp registers for card-marking.
1533 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
1534 // Ensure the value is in a byte register.
1535 locations->AddTemp(Location::RegisterLocation(ECX));
1536 } else if (type == Primitive::kPrimLong && is_volatile) {
1537 locations->AddTemp(Location::RequiresFpuRegister());
1538 locations->AddTemp(Location::RequiresFpuRegister());
1539 }
1540}
1541
1542void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
1543 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
1544}
1545void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
1546 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, false);
1547}
1548void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
1549 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimInt, invoke, true);
1550}
1551void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
1552 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
1553}
1554void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
1555 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, false);
1556}
1557void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
1558 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimNot, invoke, true);
1559}
1560void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
1561 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
1562}
1563void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
1564 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, false);
1565}
1566void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
1567 CreateIntIntIntIntToVoidPlusTempsLocations(arena_, Primitive::kPrimLong, invoke, true);
1568}
1569
1570 // Ordered puts need no special code on x86: ordered requires only an AnyStore
1571 // barrier, which the x86 memory model already provides for plain stores.
1572static void GenUnsafePut(LocationSummary* locations,
1573 Primitive::Type type,
1574 bool is_volatile,
1575 CodeGeneratorX86* codegen) {
1576 X86Assembler* assembler = reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
1577 Register base = locations->InAt(1).AsRegister<Register>();
1578 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
1579 Location value_loc = locations->InAt(3);
1580
1581 if (type == Primitive::kPrimLong) {
1582 Register value_lo = value_loc.AsRegisterPairLow<Register>();
1583 Register value_hi = value_loc.AsRegisterPairHigh<Register>();
1584 if (is_volatile) {
1585 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1586 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
1587 __ movd(temp1, value_lo);
1588 __ movd(temp2, value_hi);
1589 __ punpckldq(temp1, temp2);
1590 __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
1591 } else {
1592 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
1593 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
1594 }
1595 } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1596 Register temp = locations->GetTemp(0).AsRegister<Register>();
1597 __ movl(temp, value_loc.AsRegister<Register>());
1598 __ PoisonHeapReference(temp);
1599 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
1600 } else {
1601 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
1602 }
1603
1604 if (is_volatile) {
1605 __ mfence();
1606 }
1607
1608 if (type == Primitive::kPrimNot) {
1609 bool value_can_be_null = true; // TODO: Worth finding out this information?
1610 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
1611 locations->GetTemp(1).AsRegister<Register>(),
1612 base,
1613 value_loc.AsRegister<Register>(),
1614 value_can_be_null);
1615 }
1616}
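// The volatile long store mirrors the volatile get: punpckldq packs the two
// 32-bit halves into a single XMM register so one 8-byte movsd store can be
// emitted, and mfence then provides the trailing barrier (sketch):
//   movd      xmm1, value_lo
//   movd      xmm2, value_hi
//   punpckldq xmm1, xmm2              // xmm1 = value_hi:value_lo
//   movsd     [base + offset], xmm1   // atomic 8-byte store when aligned
//   mfence                            // volatile puts only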
1617
1618void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
1619 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
1620}
1621void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
1622 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, false, codegen_);
1623}
1624void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
1625 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, true, codegen_);
1626}
1627void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
1628 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
1629}
1630void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
1631 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, false, codegen_);
1632}
1633void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
1634 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, true, codegen_);
1635}
1636void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
1637 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
1638}
1639void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
1640 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, false, codegen_);
1641}
1642void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
1643 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, true, codegen_);
1644}
1645
1646 static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
1647 HInvoke* invoke) {
1648 LocationSummary* locations = new (arena) LocationSummary(invoke,
1649 LocationSummary::kNoCall,
1650 kIntrinsified);
1651 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1652 locations->SetInAt(1, Location::RequiresRegister());
1653 // The offset is a long, but in 32-bit mode we only need the low word.
1654 // TODO: Can we update the invoke here to remove the TypeConversion to long?
1655 locations->SetInAt(2, Location::RequiresRegister());
1656 // Expected value must be in EAX or EDX:EAX.
1657 // For long, new value must be in ECX:EBX.
1658 if (type == Primitive::kPrimLong) {
1659 locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
1660 locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
1661 } else {
1662 locations->SetInAt(3, Location::RegisterLocation(EAX));
1663 locations->SetInAt(4, Location::RequiresRegister());
1664 }
1665
1666 // The output must be EAX for cmpxchg; EAX is also byte-addressable for setb.
1667 locations->SetOut(Location::RegisterLocation(EAX));
1668 if (type == Primitive::kPrimNot) {
1669 // Need temp registers for card-marking.
1670 locations->AddTemp(Location::RequiresRegister());
1671 // Need a byte register for marking.
1672 locations->AddTemp(Location::RegisterLocation(ECX));
1673 }
1674}
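// The fixed registers above come straight from the ISA: CMPXCHG8B compares
// EDX:EAX against its 8-byte memory operand and, on a match, stores ECX:EBX
// into it; 32-bit CMPXCHG compares EAX, and both set ZF according to success.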
1675
1676void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
1677 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
1678}
1679
1680void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
1681 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
1682}
1683
1684void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
1685 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
1686}
1687
1688static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
1689 X86Assembler* assembler =
1690 reinterpret_cast<X86Assembler*>(codegen->GetAssembler());
1691 LocationSummary* locations = invoke->GetLocations();
1692
1693 Register base = locations->InAt(1).AsRegister<Register>();
1694 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
1695 Location out = locations->Out();
1696 DCHECK_EQ(out.AsRegister<Register>(), EAX);
1697
1698 if (type == Primitive::kPrimLong) {
1699 DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
1700 DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
1701 DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
1702 DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
1703 __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
1704 } else {
1705 // Integer or object.
1706 Register expected = locations->InAt(3).AsRegister<Register>();
1707 DCHECK_EQ(expected, EAX);
1708 Register value = locations->InAt(4).AsRegister<Register>();
1709 if (type == Primitive::kPrimNot) {
1710 // Mark card for object assuming new value is stored.
1711 bool value_can_be_null = true; // TODO: Worth finding out this information?
1712 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
1713 locations->GetTemp(1).AsRegister<Register>(),
1714 base,
1715 value,
1716 value_can_be_null);
1717
1718 if (kPoisonHeapReferences) {
1719 __ PoisonHeapReference(expected);
1720 __ PoisonHeapReference(value);
1721 }
1722 }
1723
1724 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
1725 }
1726
1727 // locked cmpxchg has full barrier semantics, and we don't need scheduling
1728 // barriers at this time.
1729
1730 // Convert ZF into the boolean result.
1731 __ setb(kZero, out.AsRegister<Register>());
1732 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
1733
1734 if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
1735 Register value = locations->InAt(4).AsRegister<Register>();
1736 __ UnpoisonHeapReference(value);
1737 // Do not unpoison the reference contained in register `expected`,
1738 // as it is the same as register `out`.
1739 }
1740}
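// Put together, an Unsafe.compareAndSwapInt call lowers to roughly this
// sequence (a sketch that ignores heap poisoning and card marking; `expected`
// is pinned to EAX by the location builder above):
//   lock cmpxchg [base + offset], value   // compares EAX, ZF = success
//   setz  al                              // ZF -> 0/1
//   movzx eax, al                         // widen the boolean result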
1741
1742void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
1743 GenCAS(Primitive::kPrimInt, invoke, codegen_);
1744}
1745
1746void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
1747 GenCAS(Primitive::kPrimLong, invoke, codegen_);
1748}
1749
1750void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
1751 GenCAS(Primitive::kPrimNot, invoke, codegen_);
1752}
1753
1754void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
1755 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1756 LocationSummary::kNoCall,
1757 kIntrinsified);
1758 locations->SetInAt(0, Location::RequiresRegister());
1759 locations->SetOut(Location::SameAsFirstInput());
1760 locations->AddTemp(Location::RequiresRegister());
1761}
1762
1763static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
1764 X86Assembler* assembler) {
1765 Immediate imm_shift(shift);
1766 Immediate imm_mask(mask);
1767 __ movl(temp, reg);
1768 __ shrl(reg, imm_shift);
1769 __ andl(temp, imm_mask);
1770 __ andl(reg, imm_mask);
1771 __ shll(temp, imm_shift);
1772 __ orl(reg, temp);
1773}
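// In C terms, SwapBits performs one masked swap round (sketch):
//   reg = ((reg >> shift) & mask) | ((reg & mask) << shift);
// The temp register lets reg be updated in place.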
1774
1775void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
1776 X86Assembler* assembler =
1777 reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
1778 LocationSummary* locations = invoke->GetLocations();
1779
1780 Register reg = locations->InAt(0).AsRegister<Register>();
1781 Register temp = locations->GetTemp(0).AsRegister<Register>();
1782
1783 /*
1784 * Use one bswap instruction to reverse the byte order first, then use 3 rounds of
1785 * bit swapping to reverse the bits within each byte. Using bswap saves instructions
1786 * compared to the generic luni implementation, which needs 5 rounds of bit swapping.
1787 * x = bswap x
1788 * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
1789 * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
1790 * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
1791 */
1792 __ bswapl(reg);
1793 SwapBits(reg, temp, 1, 0x55555555, assembler);
1794 SwapBits(reg, temp, 2, 0x33333333, assembler);
1795 SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
1796}
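// For reference, the same algorithm as portable C++ (a sketch; the GCC/Clang
// builtin __builtin_bswap32 stands in for the bswapl instruction):
//   uint32_t ReverseBits32(uint32_t x) {
//     x = __builtin_bswap32(x);
//     x = ((x >> 1) & 0x55555555u) | ((x & 0x55555555u) << 1);
//     x = ((x >> 2) & 0x33333333u) | ((x & 0x33333333u) << 2);
//     x = ((x >> 4) & 0x0F0F0F0Fu) | ((x & 0x0F0F0F0Fu) << 4);
//     return x;
//   }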
1797
1798void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
1799 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1800 LocationSummary::kNoCall,
1801 kIntrinsified);
1802 locations->SetInAt(0, Location::RequiresRegister());
1803 locations->SetOut(Location::SameAsFirstInput());
1804 locations->AddTemp(Location::RequiresRegister());
1805}
1806
1807void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
1808 X86Assembler* assembler =
1809 reinterpret_cast<X86Assembler*>(codegen_->GetAssembler());
1810 LocationSummary* locations = invoke->GetLocations();
1811
1812 Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
1813 Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
1814 Register temp = locations->GetTemp(0).AsRegister<Register>();
1815
1816 // Exchange the high and low words, then apply the full 32-bit bit reverse
1817 // (bswap plus three masked swap rounds) to each word.
1818 // Exchange high and low.
1819 __ movl(temp, reg_low);
1820 __ movl(reg_low, reg_high);
1821 __ movl(reg_high, temp);
1822
1823 // bit-reverse low
1824 __ bswapl(reg_low);
1825 SwapBits(reg_low, temp, 1, 0x55555555, assembler);
1826 SwapBits(reg_low, temp, 2, 0x33333333, assembler);
1827 SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);
1828
1829 // bit-reverse high
1830 __ bswapl(reg_high);
1831 SwapBits(reg_high, temp, 1, 0x55555555, assembler);
1832 SwapBits(reg_high, temp, 2, 0x33333333, assembler);
1833 SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
1834}
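// Equivalently, in terms of the 32-bit helper sketched above:
//   ReverseBits64(x) == (uint64_t(ReverseBits32(uint32_t(x))) << 32)
//                       | ReverseBits32(uint32_t(x >> 32));
// which is exactly the exchange-then-reverse-each-word structure used here.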
1835
1836 static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
1837 LocationSummary* locations = new (arena) LocationSummary(invoke,
1838 LocationSummary::kNoCall,
1839 kIntrinsified);
1840 if (is_long) {
1841 locations->SetInAt(0, Location::RequiresRegister());
1842 } else {
1843 locations->SetInAt(0, Location::Any());
1844 }
1845 locations->SetOut(Location::RequiresRegister());
1846}
1847
1848static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
1849 LocationSummary* locations = invoke->GetLocations();
1850 Location src = locations->InAt(0);
1851 Register out = locations->Out().AsRegister<Register>();
1852
1853 if (invoke->InputAt(0)->IsConstant()) {
1854 // Evaluate this at compile time.
1855 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
1856 if (value == 0) {
1857 value = is_long ? 64 : 32;
1858 } else {
1859 value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
1860 }
1861 if (value == 0) {
1862 __ xorl(out, out);
1863 } else {
1864 __ movl(out, Immediate(value));
1865 }
1866 return;
1867 }
1868
1869 // Handle the non-constant cases.
1870 if (!is_long) {
1871 if (src.IsRegister()) {
1872 __ bsrl(out, src.AsRegister<Register>());
1873 } else {
1874 DCHECK(src.IsStackSlot());
1875 __ bsrl(out, Address(ESP, src.GetStackIndex()));
1876 }
1877
1878 // BSR sets ZF if the input was zero; in that case, its output is undefined.
1879 NearLabel all_zeroes, done;
1880 __ j(kEqual, &all_zeroes);
1881
1882 // Correct the result from BSR to get the final CLZ result.
1883 __ xorl(out, Immediate(31));
1884 __ jmp(&done);
1885
1886 // Fix the zero case with the expected result.
1887 __ Bind(&all_zeroes);
1888 __ movl(out, Immediate(32));
1889
1890 __ Bind(&done);
1891 return;
1892 }
1893
1894 // 64 bit case needs to worry about both parts of the register.
1895 DCHECK(src.IsRegisterPair());
1896 Register src_lo = src.AsRegisterPairLow<Register>();
1897 Register src_hi = src.AsRegisterPairHigh<Register>();
1898 NearLabel handle_low, done, all_zeroes;
1899
1900 // Is the high word zero?
1901 __ testl(src_hi, src_hi);
1902 __ j(kEqual, &handle_low);
1903
1904 // High word is not zero. We know that the BSR result is defined in this case.
1905 __ bsrl(out, src_hi);
1906
1907 // Correct the result from BSR to get the final CLZ result.
1908 __ xorl(out, Immediate(31));
1909 __ jmp(&done);
1910
1911 // High word was zero. We have to compute the low word count and add 32.
1912 __ Bind(&handle_low);
1913 __ bsrl(out, src_lo);
1914 __ j(kEqual, &all_zeroes);
1915
1916 // We had a valid result. Use an XOR to both correct the result and add 32.
1917 __ xorl(out, Immediate(63));
1918 __ jmp(&done);
1919
1920 // All zero case.
1921 __ Bind(&all_zeroes);
1922 __ movl(out, Immediate(64));
1923
1924 __ Bind(&done);
1925}
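// The BSR lowering in C++ terms (a sketch; Bsr(x) names the bsrl result, i.e.
// the index of the highest set bit, defined only for x != 0):
//   int Clz32(uint32_t x) { return x == 0 ? 32 : Bsr(x) ^ 31; }
//   int Clz64(uint32_t hi, uint32_t lo) {
//     if (hi != 0) return Bsr(hi) ^ 31;  // result in [0, 31]
//     if (lo != 0) return Bsr(lo) ^ 63;  // one XOR corrects and adds 32
//     return 64;
//   }
// For Bsr(lo) in [0, 31], Bsr(lo) ^ 63 == (Bsr(lo) ^ 31) + 32, which is why the
// low-word path needs no separate addl.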
1926
1927void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
1928 CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
1929}
1930
1931void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
1932 X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
1933 GenLeadingZeros(assembler, invoke, /* is_long */ false);
1934}
1935
1936void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
1937 CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
1938}
1939
1940void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
1941 X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
1942 GenLeadingZeros(assembler, invoke, /* is_long */ true);
1943}
1944
1945 // Unimplemented intrinsics.
1946
1947#define UNIMPLEMENTED_INTRINSIC(Name) \
1948void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
1949} \
1950void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
1951}
1952
1953 UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
1954 UNIMPLEMENTED_INTRINSIC(StringGetCharsNoCheck)
1955 UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
1956 UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
1957
1958 #undef UNIMPLEMENTED_INTRINSIC
1959
1960#undef __
1961
1962} // namespace x86
1963} // namespace art