/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

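// Bit patterns for the canonical quiet NaN values, i.e. the values that
// Double.doubleToLongBits(Double.NaN) and Float.floatToIntBits(Float.NaN)
// return (0x7FF8000000000000L and 0x7FC00000). GenMinMaxFP below
// materializes these constants when either input of min/max is NaN.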
static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int kFloatNaN = 0x7FC00000;

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : arena_(codegen->GetGraph()->GetArena()), codegen_(codegen) {
}

X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  if (kEmitCompilerReadBarrier && res->CanCall()) {
    // Generating an intrinsic for this HInvoke may produce an
    // IntrinsicSlowPathX86 slow path. Currently this approach
    // does not work when using read barriers, as the emitted
    // calling sequence will make use of another slow path
    // (ReadBarrierForRootSlowPathX86 for HInvokeStaticOrDirect,
    // ReadBarrierSlowPathX86 for HInvokeVirtual). So we bail
    // out in this case.
    //
    // TODO: Find a way to have intrinsics work with read barriers.
    invoke->SetLocations(nullptr);
    return false;
  }
  return res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

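// Note: on 32-bit x86 a Java long/double lives in a register pair, while movd
// only transfers 32 bits between a GPR and an XMM register. The 64-bit paths
// below therefore move one 32-bit half at a time: psrlq shifts the high word
// down into reach, and punpckldq interleaves two 32-bit halves back into one
// 64-bit lane.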
static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

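// A sketch of the short case handled below: bswapl reverses all four bytes of
// the 32-bit register, which leaves the two significant bytes of the short in
// bits 31..16; the arithmetic shift right by 16 moves them back down while
// restoring the sign extension a Java short result requires.
// E.g. for the short 0x1234 (extended to 0x00001234):
//   bswapl -> 0x34120000, sarl 16 -> 0x00003412.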
static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
// need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  // TODO: Allow x86 to work with memory. This requires assembler support, see below.
  // locations->SetInAt(0, Location::Any());  // X86 can work on memory directly.
  locations->SetOut(Location::SameAsFirstInput());
}

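// Floating-point abs only needs to clear the sign bit, so MathAbsFP below
// ANDs the value with a mask that keeps everything but the top bit:
// 0x7FFFFFFFFFFFFFFF for doubles, 0x7FFFFFFF for floats. The mask is built
// on the stack because there is no literal pool here; the sub/push sequences
// total 16 bytes, which keeps the stack aligned for the memory operand.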
static void MathAbsFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location output = locations->Out();

  if (output.IsFpuRegister()) {
    // Create the right constant on an aligned stack.
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  } else {
    // TODO: update when assembler support is available.
    UNIMPLEMENTED(FATAL) << "Needs assembler support.";
//  Once assembler support is available, in-memory operations look like this:
//  if (is64bit) {
//    DCHECK(output.IsDoubleStackSlot());
//    __ andl(Address(Register(RSP), output.GetHighStackIndex(kX86WordSize)),
//            Immediate(0x7FFFFFFF));
//  } else {
//    DCHECK(output.IsStackSlot());
//    // Can use and with a literal directly.
//    __ andl(Address(Register(RSP), output.GetStackIndex()), Immediate(0x7FFFFFFF));
//  }
  }
}

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), false, GetAssembler());
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

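// GenAbsInteger uses the classic branchless abs identity:
//   abs(x) = (x ^ (x >> 31)) - (x >> 31)
// where x >> 31 is the arithmetic shift that replicates the sign bit; cdq
// computes exactly that sign mask into EDX from EAX. For non-negative x the
// mask is 0 and nothing changes; for negative x the xor is a bitwise NOT and
// the subtract adds 1, i.e. a two's-complement negation.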
static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

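// A note on the +-0.0 handling in GenMinMaxFP below: -0.0 differs from +0.0
// only in the sign bit, and ucomiss/ucomisd treat them as equal, so the
// ordered compares cannot pick the right one. Bitwise ops can: for min,
// OR-ing the operands keeps the sign bit if either value is -0.0 (and
// min(+0.0, -0.0) is -0.0); for max, AND-ing clears the sign bit unless both
// values are -0.0 (and max(+0.0, -0.0) is +0.0).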
static void GenMinMaxFP(LocationSummary* locations, bool is_min, bool is_double,
                        X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if NaN jmp nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  // nan_label:
  //  out := NaN
  // op2_label:
  //  out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  // ucomiss/ucomisd set the parity flag if either operand is NaN.
  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  if (is_double) {
    __ pushl(Immediate(kDoubleNaNHigh));
    __ pushl(Immediate(kDoubleNaNLow));
    __ movsd(out, Address(ESP, 0));
    __ addl(ESP, Immediate(8));
  } else {
    __ pushl(Immediate(kFloatNaN));
    __ movss(out, Address(ESP, 0));
    __ addl(ESP, Immediate(4));
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(), false, false, GetAssembler());
}

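// For the long case below there is no 64-bit compare on x86-32, so GenMinMax
// synthesizes one: subtracting op2 from op1 via sub/sbb on the register pair
// sets the sign and overflow flags exactly as a full 64-bit signed compare
// would, after which a pair of cmovl instructions selects the right halves.
// Only the flags matter, so the subtraction results go to a scratch register.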
static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_lo.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    //  (out := op1)
    //  out <=? op2
    //  if out is min jmp done
    //  out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), true, true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), false, true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

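// When the CPU lacks the needed instruction support (e.g. SSE4.1 for the
// rounding intrinsics below), the intrinsic still gets "implemented" by
// falling back to an out-of-line call to the original method: the arguments
// are shuffled into the runtime calling convention, the call is emitted, and
// the result is copied back to wherever the intrinsic's consumer expects it.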
static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}

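// The round_mode immediate below is the SSE4.1 rounding-control encoding:
// 0 rounds to nearest with ties to even (used for rint), 1 rounds toward
// negative infinity (floor), 2 rounds toward positive infinity (ceil), and
// 3 would truncate toward zero.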
void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

// Note that 32-bit x86 doesn't support inlining MathRoundDouble, as it needs
// 64-bit instructions.
void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

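// The inline path below implements Math.round(float)'s semantics by hand:
// round-half-up is computed as floor(input + 0.5f), a NaN input produces 0,
// and inputs too large for an int clamp to Integer.MAX_VALUE (out already
// holds kPrimIntMax when that branch exits early). For large negative
// inputs, cvttss2si yields 0x80000000, which is exactly Integer.MIN_VALUE,
// matching the Java definition.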
void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  // Implement RoundFloat as t1 = floor(input + 0.5f); convert to int.
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  XmmRegister maxInt = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister inPlusPointFive = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  NearLabel done, nan;
  X86Assembler* assembler = GetAssembler();

  // Generate 0.5 into inPlusPointFive.
  __ movl(out, Immediate(bit_cast<int32_t, float>(0.5f)));
  __ movd(inPlusPointFive, out);

  // Add in the input.
  __ addss(inPlusPointFive, in);

  // And floor the result to an integer (roundss mode 1 = round toward -inf).
  __ roundss(inPlusPointFive, inPlusPointFive, Immediate(1));

  __ movl(out, Immediate(kPrimIntMax));
  // maxInt = int-to-float(out)
  __ cvtsi2ss(maxInt, out);

  // if inPlusPointFive >= maxInt goto done
  __ comiss(inPlusPointFive, maxInt);
  __ j(kAboveEqual, &done);

  // if input == NaN goto nan
  __ j(kUnordered, &nan);

  // output = float-to-int-truncate(input)
  __ cvttss2si(out, inPlusPointFive);
  __ jmp(&done);
  __ Bind(&nan);

  // output = 0
  __ xorl(out, out);
  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitStringCharAt(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void IntrinsicCodeGeneratorX86::VisitStringCharAt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();

  // Location of reference to data array.
  const int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count.
  const int32_t count_offset = mirror::String::CountOffset().Int32Value();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register idx = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // TODO: Maybe we can support range check elimination. Overall, though, I think it's not worth
  //       the cost.
  // TODO: For simplicity, the index parameter is requested in a register, so different from Quick
  //       we will not optimize the code for constants (which would save a register).

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  X86Assembler* assembler = GetAssembler();

  __ cmpl(idx, Address(obj, count_offset));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ j(kAboveEqual, slow_path->GetEntryLabel());

  // out = out[2*idx]. Note that out aliases obj, as the output is SameAsFirstInput.
  __ movzxw(out, Address(out, idx, ScaleFactor::TIMES_2, value_offset));

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

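// CheckPosition emits the bounds checks System.arraycopy needs for one side
// of the copy: pos >= 0, pos <= length(input), and length(input) - pos >=
// length-to-copy. When pos is a constant the first check is free, and the
// second and third fold the constant into the comparison and address
// computation.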
static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Register length,
                          SlowPathCode* slow_path,
                          Register input_len,
                          Register temp) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      // Check that length(input) >= length.
      __ cmpl(Address(input, length_offset), length);
      __ j(kLess, slow_path->GetEntryLabel());
    } else {
      // Check that length(input) >= pos.
      __ movl(input_len, Address(input, length_offset));
      __ cmpl(input_len, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      __ leal(temp, Address(input_len, -pos_const));
      __ cmpl(temp, length);
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    __ cmpl(temp, length);
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source.
  CheckPosition(assembler, srcPos, src, count, slow_path, src_base, dest_base);

  // Validity checks: dest.
  CheckPosition(assembler, destPos, dest, count, slow_path, src_base, dest_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
  __ rep_movsw();

  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
  // The inputs plus one temp.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register argument = locations->InAt(1).AsRegister<Register>();
  __ testl(argument, argument);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pStringCompareTo)));
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());

  // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
  locations->AddTemp(Location::RegisterLocation(ECX));
  locations->AddTemp(Location::RegisterLocation(EDI));

  // Set output, ESI needed for repe_cmpsl instruction anyway.
  locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
}

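// The comparison loop below uses repe_cmpsl: with the two value addresses in
// ESI/EDI and a count of 32-bit words in ECX, it compares one word (two
// UTF-16 chars) per iteration and stops early on the first mismatch. The
// (length + 1) / 2 word count is safe for odd lengths because object
// alignment guarantees the trailing two bytes are zero padding in both
// strings (see the static_assert below).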
void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register str = locations->InAt(0).AsRegister<Register>();
  Register arg = locations->InAt(1).AsRegister<Register>();
  Register ecx = locations->GetTemp(0).AsRegister<Register>();
  Register edi = locations->GetTemp(1).AsRegister<Register>();
  Register esi = locations->Out().AsRegister<Register>();

  NearLabel end, return_true, return_false;

  // Get offsets of count, value, and class fields within a string object.
  const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
  const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
  const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  StringEqualsOptimizations optimizations(invoke);
  if (!optimizations.GetArgumentNotNull()) {
    // Check if input is null, return false if it is.
    __ testl(arg, arg);
    __ j(kEqual, &return_false);
  }

  // Instanceof check for the argument by comparing class fields.
  // All string objects must have the same type since String cannot be subclassed.
  // Receiver must be a string object, so its class field is equal to all strings' class fields.
  // If the argument is a string object, its class field must be equal to receiver's class field.
  if (!optimizations.GetArgumentIsString()) {
    __ movl(ecx, Address(str, class_offset));
    __ cmpl(ecx, Address(arg, class_offset));
    __ j(kNotEqual, &return_false);
  }

  // Reference equality check, return true if same reference.
  __ cmpl(str, arg);
  __ j(kEqual, &return_true);

  // Load length of receiver string.
  __ movl(ecx, Address(str, count_offset));
  // Check if lengths are equal, return false if they're not.
  __ cmpl(ecx, Address(arg, count_offset));
  __ j(kNotEqual, &return_false);
  // Return true if both strings are empty.
  __ jecxz(&return_true);

  // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
  __ leal(esi, Address(str, value_offset));
  __ leal(edi, Address(arg, value_offset));

  // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
  __ addl(ecx, Immediate(1));
  __ shrl(ecx, Immediate(1));

  // Assertions that must hold in order to compare strings 2 characters at a time.
  DCHECK_ALIGNED(value_offset, 4);
  static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");

  // Loop to compare strings two characters at a time starting at the beginning of the string.
  __ repe_cmpsl();
  // If strings are not equal, zero flag will be cleared.
  __ j(kNotEqual, &return_false);

  // Return true and exit the function.
  // If loop does not result in returning false, we return true.
  __ Bind(&return_true);
  __ movl(esi, Immediate(1));
  __ jmp(&end);

  // Return false and exit the function.
  __ Bind(&return_false);
  __ xorl(esi, esi);
  __ Bind(&end);
}

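// String.indexOf is implemented with repne scasw: EAX holds the char to find,
// EDI the address to scan, and ECX the number of chars left. The instruction
// scans forward until it hits a match or ECX reaches zero. On a match, ECX
// has already been decremented past the matching char, so the result index
// is computed below as length - ECX - 1.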
static void CreateStringIndexOfLocations(HInvoke* invoke,
                                         ArenaAllocator* allocator,
                                         bool start_at_zero) {
  LocationSummary* locations = new (allocator) LocationSummary(invoke,
                                                               LocationSummary::kCallOnSlowPath,
                                                               kIntrinsified);
  // The data needs to be in EDI for scasw. So request that the string is there, anyway.
  locations->SetInAt(0, Location::RegisterLocation(EDI));
  // If we look for a constant char, we'll still have to copy it into EAX. So just request the
  // allocator to do that, anyway. We can still do the constant check by checking the parameter
  // of the instruction explicitly.
  // Note: This works as we don't clobber EAX anywhere.
  locations->SetInAt(1, Location::RegisterLocation(EAX));
  if (!start_at_zero) {
    locations->SetInAt(2, Location::RequiresRegister());  // The starting index.
  }
  // As we clobber EDI during execution anyway, also use it as the output.
  locations->SetOut(Location::SameAsFirstInput());

  // repne scasw uses ECX as the counter.
  locations->AddTemp(Location::RegisterLocation(ECX));
  // Need another temporary to be able to compute the result.
  locations->AddTemp(Location::RequiresRegister());
}

static void GenerateStringIndexOf(HInvoke* invoke,
                                  X86Assembler* assembler,
                                  CodeGeneratorX86* codegen,
                                  ArenaAllocator* allocator,
                                  bool start_at_zero) {
  LocationSummary* locations = invoke->GetLocations();

  // Note that the null check must have been done earlier.
  DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));

  Register string_obj = locations->InAt(0).AsRegister<Register>();
  Register search_value = locations->InAt(1).AsRegister<Register>();
  Register counter = locations->GetTemp(0).AsRegister<Register>();
  Register string_length = locations->GetTemp(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  // Check our assumptions for registers.
  DCHECK_EQ(string_obj, EDI);
  DCHECK_EQ(search_value, EAX);
  DCHECK_EQ(counter, ECX);
  DCHECK_EQ(out, EDI);

  // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
  // or directly dispatch if we have a constant.
  SlowPathCode* slow_path = nullptr;
  if (invoke->InputAt(1)->IsIntConstant()) {
    if (static_cast<uint32_t>(invoke->InputAt(1)->AsIntConstant()->GetValue()) >
        std::numeric_limits<uint16_t>::max()) {
      // Always needs the slow-path. We could directly dispatch to it, but this case should be
      // rare, so for simplicity just put the full slow-path down and branch unconditionally.
      slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
      codegen->AddSlowPath(slow_path);
      __ jmp(slow_path->GetEntryLabel());
      __ Bind(slow_path->GetExitLabel());
      return;
    }
  } else {
    __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
    slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
    codegen->AddSlowPath(slow_path);
    __ j(kAbove, slow_path->GetEntryLabel());
  }

  // From here down, we know that we are looking for a char that fits in 16 bits.
  // Location of reference to data array within the String object.
  int32_t value_offset = mirror::String::ValueOffset().Int32Value();
  // Location of count within the String object.
  int32_t count_offset = mirror::String::CountOffset().Int32Value();

  // Load string length, i.e., the count field of the string.
  __ movl(string_length, Address(string_obj, count_offset));

  // Do a zero-length check.
  // TODO: Support jecxz.
  NearLabel not_found_label;
  __ testl(string_length, string_length);
  __ j(kEqual, &not_found_label);

  if (start_at_zero) {
    // Number of chars to scan is the same as the string length.
    __ movl(counter, string_length);

    // Move to the start of the string.
    __ addl(string_obj, Immediate(value_offset));
  } else {
    Register start_index = locations->InAt(2).AsRegister<Register>();

    // Do a start_index check.
    __ cmpl(start_index, string_length);
    __ j(kGreaterEqual, &not_found_label);

    // Ensure we have a start index >= 0.
    __ xorl(counter, counter);
    __ cmpl(start_index, Immediate(0));
    __ cmovl(kGreater, counter, start_index);

    // Move to the start of the string: string_obj + value_offset + 2 * start_index.
    __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));

    // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
    // compare.
    __ negl(counter);
    __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
  }

  // Everything is set up for repne scasw:
  //   * Comparison address in EDI.
  //   * Counter in ECX.
  __ repne_scasw();

  // Did we find a match?
  __ j(kNotEqual, &not_found_label);

  // Yes, we matched. Compute the index of the result.
  __ subl(string_length, counter);
  __ leal(out, Address(string_length, -1));

  NearLabel done;
  __ jmp(&done);

  // Failed to match; return -1.
  __ Bind(&not_found_label);
  __ movl(out, Immediate(-1));

  // And join up at the end.
  __ Bind(&done);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
}

void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, true);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), true);
}

void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  CreateStringIndexOfLocations(invoke, arena_, false);
}

void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
  GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), false);
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register byte_array = locations->InAt(0).AsRegister<Register>();
  __ testl(byte_array, byte_array);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromBytes)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromChars)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCall,
                                                            kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register string_to_copy = locations->InAt(0).AsRegister<Register>();
  __ testl(string_to_copy, string_to_copy);
  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);
  __ j(kEqual, slow_path->GetEntryLabel());

  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocStringFromString)));
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  __ Bind(slow_path->GetExitLabel());
}

Mark Mendell8f8926a2015-08-17 11:39:06 -04001352void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1353 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1354 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1355 LocationSummary::kNoCall,
1356 kIntrinsified);
1357 locations->SetInAt(0, Location::RequiresRegister());
1358 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1359 // Place srcEnd in ECX to save a move below.
1360 locations->SetInAt(2, Location::RegisterLocation(ECX));
1361 locations->SetInAt(3, Location::RequiresRegister());
1362 locations->SetInAt(4, Location::RequiresRegister());
1363
1364 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1365 // We don't have enough registers to also grab ECX, so handle below.
1366 locations->AddTemp(Location::RegisterLocation(ESI));
1367 locations->AddTemp(Location::RegisterLocation(EDI));
1368}
1369
1370void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1371 X86Assembler* assembler = GetAssembler();
1372 LocationSummary* locations = invoke->GetLocations();
1373
1374 size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
1375 // Location of data in char array buffer.
1376 const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
1377 // Location of char array data in string.
1378 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1379
1380 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1381 Register obj = locations->InAt(0).AsRegister<Register>();
1382 Location srcBegin = locations->InAt(1);
1383 int srcBegin_value =
1384 srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
1385 Register srcEnd = locations->InAt(2).AsRegister<Register>();
1386 Register dst = locations->InAt(3).AsRegister<Register>();
1387 Register dstBegin = locations->InAt(4).AsRegister<Register>();
1388
1389 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1390 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1391 DCHECK_EQ(char_size, 2u);
1392
1393 // Compute the address of the destination buffer.
1394 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1395
1396 // Compute the address of the source string.
1397 if (srcBegin.IsConstant()) {
1398 // Compute the address of the source string by adding the number of chars from
1399 // the source beginning to the value offset of a string.
1400 __ leal(ESI, Address(obj, srcBegin_value * char_size + value_offset));
1401 } else {
1402 __ leal(ESI, Address(obj, srcBegin.AsRegister<Register>(),
1403 ScaleFactor::TIMES_2, value_offset));
1404 }
1405
1406 // Compute the number of chars (words) to move.
1407 // Now is the time to save ECX, since we don't know if it will be used later.
1408 __ pushl(ECX);
1409 int stack_adjust = kX86WordSize;
1410 __ cfi().AdjustCFAOffset(stack_adjust);
1411 DCHECK_EQ(srcEnd, ECX);
1412 if (srcBegin.IsConstant()) {
1413 if (srcBegin_value != 0) {
1414 __ subl(ECX, Immediate(srcBegin_value));
1415 }
1416 } else {
1417 DCHECK(srcBegin.IsRegister());
1418 __ subl(ECX, srcBegin.AsRegister<Register>());
1419 }
1420
1421 // Do the move.
1422 __ rep_movsw();
1423
1424 // And restore ECX.
1425 __ popl(ECX);
1426 __ cfi().AdjustCFAOffset(-stack_adjust);
1427}
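
// Illustrative sketch (not compiler code): the sequence above behaves like the
// following C loop over 16-bit chars, with the copy itself performed by a
// single REP MOVSW. All names here are for exposition only.
//
//   uint16_t* s = (uint16_t*)(obj + value_offset) + srcBegin;
//   uint16_t* d = (uint16_t*)(dst + data_offset) + dstBegin;
//   for (int n = srcEnd - srcBegin; n > 0; --n) {  // count n lives in ECX
//     *d++ = *s++;                                 // one MOVSW step
//   }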

static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location out_loc = locations->Out();
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimShort:
      __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimInt:
      __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
      break;
    case Primitive::kPrimLong:
      __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
      __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
      break;
    default:
      LOG(FATAL) << "Type not recognized for peek: " << size;
      UNREACHABLE();
  }
}
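
// For example, Memory.peekIntNative(addr) reduces to a single load, roughly
// 'movl out, [addr]' (a sketch; actual registers depend on allocation). The
// long variant needs two loads because 32-bit x86 has no 64-bit general
// purpose register, so a peek of a concurrently written long may be torn.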

void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  CreateLongToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
  GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
                                         HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  HInstruction* value = invoke->InputAt(1);
  if (size == Primitive::kPrimByte) {
    locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
  } else {
    locations->SetInAt(1, Location::RegisterOrConstant(value));
  }
}

static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
  Register address = locations->InAt(0).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(1);
  // x86 allows unaligned access. We do not have to check the input or use specific instructions
  // to avoid a SIGBUS.
  switch (size) {
    case Primitive::kPrimByte:
      if (value_loc.IsConstant()) {
        __ movb(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
      }
      break;
    case Primitive::kPrimShort:
      if (value_loc.IsConstant()) {
        __ movw(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movw(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimInt:
      if (value_loc.IsConstant()) {
        __ movl(Address(address, 0),
                Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegister<Register>());
      }
      break;
    case Primitive::kPrimLong:
      if (value_loc.IsConstant()) {
        int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
        __ movl(Address(address, 0), Immediate(Low32Bits(value)));
        __ movl(Address(address, 4), Immediate(High32Bits(value)));
      } else {
        __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
        __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
      }
      break;
    default:
      LOG(FATAL) << "Type not recognized for poke: " << size;
      UNREACHABLE();
  }
}
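
// Sketch for exposition: Memory.pokeByte(addr, (byte) 0x2A) becomes a single
// 'movb [addr], 0x2A'. A long poke is split into two 32-bit stores (low word
// at [addr], high word at [addr + 4]), so, like the long peek, it is not
// atomic with respect to concurrent access.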

void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
  GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetOut(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
  Register out = invoke->GetLocations()->Out().AsRegister<Register>();
  GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86WordSize>()));
}
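
// Thread.currentThread() thus compiles to one segment-relative load, roughly
// 'movl out, fs:[peer_offset]' (illustrative): on x86 the FS segment base
// points at the current thread's ART Thread structure, and PeerOffset is the
// offset of its Java peer object within that structure.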

static void GenUnsafeGet(HInvoke* invoke,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();
  Location base_loc = locations->InAt(1);
  Register base = base_loc.AsRegister<Register>();
  Location offset_loc = locations->InAt(2);
  Register offset = offset_loc.AsRegisterPairLow<Register>();
  Location output_loc = locations->Out();

  switch (type) {
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register output = output_loc.AsRegister<Register>();
      __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
      if (type == Primitive::kPrimNot) {
        codegen->MaybeGenerateReadBarrier(invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Register output_lo = output_loc.AsRegisterPairLow<Register>();
      Register output_hi = output_loc.AsRegisterPairHigh<Register>();
      if (is_volatile) {
        // Need to use an XMM register to read the long atomically.
        XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
        __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
        __ movd(output_lo, temp);
        __ psrlq(temp, Immediate(32));
        __ movd(output_hi, temp);
      } else {
        __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
        __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unsupported op size " << type;
      UNREACHABLE();
  }
}
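
// Why the XMM dance for volatile longs (a sketch of the emitted code): x86-32
// cannot load 64 bits into integer registers with one instruction, but an
// 8-byte MOVSD load from a suitably aligned address is atomic, so the value
// is read into an XMM register and then split:
//
//   movsd xmm, [base + offset]   // single atomic 64-bit load
//   movd  lo, xmm                // low 32 bits
//   psrlq xmm, 32                // shift the high half down
//   movd  hi, xmm                // high 32 bits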

static void CreateIntIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke,
                                          bool is_long, bool is_volatile) {
  bool can_call = kEmitCompilerReadBarrier &&
      (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
       invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           can_call ?
                                                               LocationSummary::kCallOnSlowPath :
                                                               LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  if (is_long) {
    if (is_volatile) {
      // Need to use XMM to read volatile.
      locations->AddTemp(Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
    } else {
      locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
    }
  } else {
    locations->SetOut(Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ true, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntToIntLocations(arena_, invoke, /* is_long */ false, /* is_volatile */ true);
}


void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
  GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}


static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
                                                       Primitive::Type type,
                                                       HInvoke* invoke,
                                                       bool is_volatile) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RequiresRegister());
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Ensure the value is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  } else if (type == Primitive::kPrimLong && is_volatile) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
}
void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  CreateIntIntIntIntToVoidPlusTempsLocations(
      arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
}

// We do not need special handling for the "ordered" variants: they only require an
// AnyStore barrier, which the x86 memory model already provides.
static void GenUnsafePut(LocationSummary* locations,
                         Primitive::Type type,
                         bool is_volatile,
                         CodeGeneratorX86* codegen) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location value_loc = locations->InAt(3);

  if (type == Primitive::kPrimLong) {
    Register value_lo = value_loc.AsRegisterPairLow<Register>();
    Register value_hi = value_loc.AsRegisterPairHigh<Register>();
    if (is_volatile) {
      XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
      XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
      __ movd(temp1, value_lo);
      __ movd(temp2, value_hi);
      __ punpckldq(temp1, temp2);
      __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
    } else {
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
      __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
    }
  } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    __ movl(temp, value_loc.AsRegister<Register>());
    __ PoisonHeapReference(temp);
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
  } else {
    __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
  }

  if (is_volatile) {
    __ mfence();
  }

  if (type == Primitive::kPrimNot) {
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value_loc.AsRegister<Register>(),
                        value_can_be_null);
  }
}
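
// The volatile long store mirrors the volatile read (sketch): the two 32-bit
// halves are packed into one XMM register so the store is a single 8-byte
// MOVSD, then MFENCE provides the trailing StoreLoad barrier:
//
//   movd      xmm1, lo
//   movd      xmm2, hi
//   punpckldq xmm1, xmm2            // xmm1 = hi:lo
//   movsd     [base + offset], xmm1
//   mfence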

void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
}
void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
  GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
}

static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
                                       HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::NoLocation());  // Unused receiver.
  locations->SetInAt(1, Location::RequiresRegister());
  // The offset is a long, but in 32-bit mode we only need the low word.
  // Can we update the invoke here to remove a TypeConvert to Long?
  locations->SetInAt(2, Location::RequiresRegister());
  // The expected value must be in EAX or EDX:EAX.
  // For long, the new value must be in ECX:EBX.
  if (type == Primitive::kPrimLong) {
    locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
    locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
  } else {
    locations->SetInAt(3, Location::RegisterLocation(EAX));
    locations->SetInAt(4, Location::RequiresRegister());
  }

  // Force a byte register for the output.
  locations->SetOut(Location::RegisterLocation(EAX));
  if (type == Primitive::kPrimNot) {
    // Need temp registers for card-marking.
    locations->AddTemp(Location::RequiresRegister());  // Possibly used for reference poisoning too.
    // Need a byte register for marking.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
}

void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
  CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
}

static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register base = locations->InAt(1).AsRegister<Register>();
  Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
  Location out = locations->Out();
  DCHECK_EQ(out.AsRegister<Register>(), EAX);

  if (type == Primitive::kPrimNot) {
    Register expected = locations->InAt(3).AsRegister<Register>();
    // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
    DCHECK_EQ(expected, EAX);
    Register value = locations->InAt(4).AsRegister<Register>();

    // Mark card for object assuming the new value is stored.
    bool value_can_be_null = true;  // TODO: Worth finding out this information?
    codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
                        locations->GetTemp(1).AsRegister<Register>(),
                        base,
                        value,
                        value_can_be_null);

    bool base_equals_value = (base == value);
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // If `base` and `value` are the same register location, move
        // `value` to a temporary register. This way, poisoning
        // `value` won't invalidate `base`.
        value = locations->GetTemp(0).AsRegister<Register>();
        __ movl(value, base);
      }

      // Check that the register allocator did not assign the location
      // of `expected` (EAX) to `value` nor to `base`, so that heap
      // poisoning (when enabled) works as intended below.
      // - If `value` were equal to `expected`, both references would
      //   be poisoned twice, meaning they would not be poisoned at
      //   all, as heap poisoning uses address negation.
      // - If `base` were equal to `expected`, poisoning `expected`
      //   would invalidate `base`.
      DCHECK_NE(value, expected);
      DCHECK_NE(base, expected);

      __ PoisonHeapReference(expected);
      __ PoisonHeapReference(value);
    }

    __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);

    // LOCK CMPXCHG has full barrier semantics, and we don't need
    // scheduling barriers at this time.

    // Convert ZF into the boolean result.
    __ setb(kZero, out.AsRegister<Register>());
    __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());

    // In the case of the `UnsafeCASObject` intrinsic, accessing an
    // object in the heap with LOCK CMPXCHG does not require a read
    // barrier, as we do not keep a reference to this heap location.
    // However, if heap poisoning is enabled, we need to unpoison the
    // values that were poisoned earlier.
    if (kPoisonHeapReferences) {
      if (base_equals_value) {
        // `value` has been moved to a temporary register, no need to
        // unpoison it.
      } else {
        // Ensure `value` is different from `out`, so that unpoisoning
        // the former does not invalidate the latter.
        DCHECK_NE(value, out.AsRegister<Register>());
        __ UnpoisonHeapReference(value);
      }
      // Do not unpoison the reference contained in register
      // `expected`, as it is the same as register `out` (EAX).
    }
  } else {
    if (type == Primitive::kPrimInt) {
      // Ensure the expected value is in EAX (required by the CMPXCHG
      // instruction).
      DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
      __ LockCmpxchgl(Address(base, offset, TIMES_1, 0),
                      locations->InAt(4).AsRegister<Register>());
    } else if (type == Primitive::kPrimLong) {
      // Ensure the expected value is in EAX:EDX and that the new
      // value is in EBX:ECX (required by the CMPXCHG8B instruction).
      DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
      DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
      DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
      DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
      __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
    } else {
      LOG(FATAL) << "Unexpected CAS type " << type;
    }

    // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
    // don't need scheduling barriers at this time.

    // Convert ZF into the boolean result.
    __ setb(kZero, out.AsRegister<Register>());
    __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
  }
}
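
// For reference, a sketch of the contract the int variant implements
// atomically via LOCK CMPXCHG (not the actual runtime code):
//
//   boolean compareAndSwapInt(Object obj, long offset, int expected, int x) {
//     if (field(obj, offset) != expected) return false;
//     field(obj, offset) = x;
//     return true;
//   }
//
// CMPXCHG compares EAX with the memory operand and sets ZF on success, which
// is why `expected` and the boolean result are both pinned to EAX above.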

void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
  GenCAS(Primitive::kPrimInt, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
  GenCAS(Primitive::kPrimLong, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
  GenCAS(Primitive::kPrimNot, invoke, codegen_);
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
                     X86Assembler* assembler) {
  Immediate imm_shift(shift);
  Immediate imm_mask(mask);
  __ movl(temp, reg);
  __ shrl(reg, imm_shift);
  __ andl(temp, imm_mask);
  __ andl(reg, imm_mask);
  __ shll(temp, imm_shift);
  __ orl(reg, temp);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg = locations->InAt(0).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  /*
   * Use one bswap instruction to reverse byte order first, and then use 3 rounds of
   * swapping bits to reverse the bits of a number x. Using bswap saves instructions
   * compared to the generic luni implementation, which needs 5 rounds of swapping bits.
   * x = bswap x
   * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
   * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
   * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
   */
  __ bswapl(reg);
  SwapBits(reg, temp, 1, 0x55555555, assembler);
  SwapBits(reg, temp, 2, 0x33333333, assembler);
  SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
}
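
// Worked example (illustrative) for reg = 0x80000001:
//   bswapl:            0x80000001 -> 0x01000080  (bytes mirrored)
//   SwapBits shift 1:  0x01000080 -> 0x02000040  (adjacent bits swapped)
//   SwapBits shift 2:  0x02000040 -> 0x08000010  (2-bit pairs swapped)
//   SwapBits shift 4:  0x08000010 -> 0x80000001  (nibbles swapped)
// After bswap mirrors the bytes, the three rounds only need to mirror the
// bits within each byte; 0x80000001 happens to be its own bit reversal.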

void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kNoCall,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  LocationSummary* locations = invoke->GetLocations();

  Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
  Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // We want to swap high/low, then bswap each one, and then do the same
  // as a 32 bit reverse.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // Bit-reverse the low word.
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // Bit-reverse the high word.
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSR sets ZF if the input was zero, and the output is undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register pair.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // High word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // High word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
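
// Why XOR corrects BSR (a sketch): BSR returns the index of the highest set
// bit, and clz(x) = 31 - bsr(x) for nonzero x; since bsr(x) is in [0, 31],
// 31 - bsr(x) == bsr(x) ^ 31. Example: for x = 0x00010000, bsr gives 16 and
// 16 ^ 31 == 15 == Integer.numberOfLeadingZeros(0x00010000). In the long
// path, XOR with 63 both applies the correction and adds the extra 32.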

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenLeadingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenTrailingZeros(X86Assembler* assembler, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
    }
    if (value == 0) {
      __ xorl(out, out);
    } else {
      __ movl(out, Immediate(value));
    }
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsfl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsfl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSF sets ZF if the input was zero, and the output is undefined.
    NearLabel done;
    __ j(kNotEqual, &done);

    // Fix the zero case with the expected result.
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register pair.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel done, all_zeroes;

  // If the low word is zero, then ZF will be set. If not, we have the answer.
  __ bsfl(out, src_lo);
  __ j(kNotEqual, &done);

  // Low word was zero. We have to compute the high word count and add 32.
  __ bsfl(out, src_hi);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Add 32 to account for the low word being zero.
  __ addl(out, Immediate(32));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}
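
// Unlike CLZ, no correction is needed here: ctz(x) == bsf(x) for nonzero x,
// since BSF returns the index of the lowest set bit. Illustrative case: for
// x = 0x0000001000000000L the low word is zero, so the high word's
// bsf(0x00000010) = 4 is computed and 32 is added, giving 36, which matches
// Long.numberOfTrailingZeros(x).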

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenTrailingZeros(assembler, invoke, /* is_long */ true);
}

static void CreateRotateLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  // The shift count needs to be in CL or a constant.
  locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, invoke->InputAt(1)));
  locations->SetOut(Location::SameAsFirstInput());
}

static void GenRotate(X86Assembler* assembler, HInvoke* invoke, bool is_left) {
  LocationSummary* locations = invoke->GetLocations();
  Register first_reg = locations->InAt(0).AsRegister<Register>();
  Location second = locations->InAt(1);

  if (second.IsRegister()) {
    Register second_reg = second.AsRegister<Register>();
    if (is_left) {
      __ roll(first_reg, second_reg);
    } else {
      __ rorl(first_reg, second_reg);
    }
  } else {
    Immediate imm(second.GetConstant()->AsIntConstant()->GetValue() & kMaxIntShiftValue);
    if (is_left) {
      __ roll(first_reg, imm);
    } else {
      __ rorl(first_reg, imm);
    }
  }
}
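
// Sketch: Integer.rotateLeft(0x12345678, 8) becomes 'rol reg, 8' and yields
// 0x34567812. Constant counts are masked with kMaxIntShiftValue (31) at
// compile time; for register counts, ROL/ROR mask CL mod 32 in hardware, so
// both paths match the Java semantics of rotation distance modulo 32.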

void IntrinsicLocationsBuilderX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateLeft(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ true);
}

void IntrinsicLocationsBuilderX86::VisitIntegerRotateRight(HInvoke* invoke) {
  CreateRotateLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerRotateRight(HInvoke* invoke) {
  X86Assembler* assembler = down_cast<X86Assembler*>(codegen_->GetAssembler());
  GenRotate(assembler, invoke, /* is_left */ false);
}

// Unimplemented intrinsics.

#define UNIMPLEMENTED_INTRINSIC(Name)                                                  \
void IntrinsicLocationsBuilderX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {   \
}                                                                                      \
void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {      \
}

UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)

#undef UNIMPLEMENTED_INTRINSIC

#undef __

}  // namespace x86
}  // namespace art