/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
  : arena_(codegen->GetGraph()->GetArena()),
    codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathX86 : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathX86(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register src = locations->InAt(0).AsRegister<Register>();
    Location src_pos = locations->InAt(1);
    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Location length = locations->InAt(4);
    Location temp1_loc = locations->GetTemp(0);
    Register temp1 = temp1_loc.AsRegister<Register>();
    Register temp2 = locations->GetTemp(1).AsRegister<Register>();
    Register temp3 = locations->GetTemp(2).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // In this code path, registers `temp1`, `temp2`, and `temp3`
    // (resp.) are not used for the base source address, the base
    // destination address, and the end source address (resp.), as in
    // other SystemArrayCopy intrinsic code paths. Instead they are
    // (resp.) used for:
    // - the loop index (`i`);
    // - the source index (`src_index`) and the loaded (source)
    //   reference (`value`); and
    // - the destination index (`dest_index`).

    // i = 0
    __ xorl(temp1, temp1);
    NearLabel loop;
    __ Bind(&loop);
    // value = src_array[i + src_pos]
    if (src_pos.IsConstant()) {
      int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));
    } else {
      __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));
    }
    __ MaybeUnpoisonHeapReference(temp2);
    // TODO: Inline the mark bit check before calling the runtime?
    // value = ReadBarrier::Mark(value)
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathX86::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(temp2, ESP);
    DCHECK(0 <= temp2 && temp2 < kNumberOfCpuRegisters) << temp2;
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(temp2);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(temp2);
    // dest_array[i + dest_pos] = value
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);
    } else {
      __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);
    }
    // ++i
    __ addl(temp1, Immediate(1));
    // if (i != length) goto loop
    x86_codegen->GenerateIntCompare(temp1_loc, length);
    __ j(kNotEqual, &loop);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathX86);
};

#undef __

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}
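
// A rough sketch of what the two movers above implement (illustrative, not
// part of the generated code): they are the raw bit casts behind
// Double.doubleToRawLongBits / Double.longBitsToDouble and their float
// equivalents, i.e. roughly
//   int64_t bits;
//   memcpy(&bits, &value, sizeof(bits));
// MoveFPToInt extracts the low word with movd and the high word via
// psrlq + movd; MoveIntToFP reassembles the register pair with punpckldq.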

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}
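
// Worked example for the short case (illustrative): a short 0xBEEF arrives
// sign-extended as 0xFFFFBEEF. bswapl turns it into 0xEFBEFFFF, and the
// arithmetic shift right by 16 leaves 0xFFFFEFBE, i.e. the byte-swapped
// short value, again sign-extended to 32 bits.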

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
//       need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::SameAsFirstInput());
  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    // We need addressability for the constant area.
    locations->SetInAt(1, Location::RequiresRegister());
    // We need a temporary to hold the constant.
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MathAbsFP(LocationSummary* locations,
                      bool is64bit,
                      X86Assembler* assembler,
                      CodeGeneratorX86* codegen) {
  Location output = locations->Out();

  DCHECK(output.IsFpuRegister());
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    DCHECK(locations->InAt(1).IsRegister());
    // We also have a constant area pointer.
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    if (is64bit) {
      __ movsd(temp, codegen->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF), constant_area));
      __ andpd(output.AsFpuRegister<XmmRegister>(), temp);
    } else {
      __ movss(temp, codegen->LiteralInt32Address(INT32_C(0x7FFFFFFF), constant_area));
      __ andps(output.AsFpuRegister<XmmRegister>(), temp);
    }
  } else {
    // Create the right constant on an aligned stack.
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  }
}
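
// The masks above are the standard "clear the sign bit" absolute value:
// IEEE-754 stores the sign in the top bit, so abs is just
//   bits & 0x7FFFFFFFFFFFFFFF  (double)  or  bits & 0x7FFFFFFF  (float),
// applied in-register by andpd/andps. The push sequence in the fallback path
// only materializes the same constant when no constant area pointer is
// available.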

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler(), codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler(), codegen_);
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}
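
// Sketch of the branchless idiom used above (illustrative):
//   sign = x >> 31;            // cdq: 0 if x >= 0, -1 if x < 0
//   abs  = (x ^ sign) - sign;  // xorl + subl
// e.g. x = -5: sign = -1, x ^ sign = 4, and 4 - (-1) = 5.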

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}
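
// Same xor/subtract idiom widened to 64 bits: the sign word is replicated
// into both output halves, XORed in, and then subtracted with subl/sbbl so
// the borrow propagates from the low word into the high word.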

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        X86Assembler* assembler,
                        CodeGeneratorX86* codegen) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  //  (out := op1)
  //  out <=? op2
  //  if NaN jmp nan_label
  //  if out is min jmp done
  //  if op2 is min jmp op2_label
  //  handle -0/+0
  //  jmp done
  // nan_label:
  //  out := NaN
  // op2_label:
  //  out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  // Do we have a constant area pointer?
  if (locations->GetInputCount() == 3 && locations->InAt(2).IsValid()) {
    DCHECK(locations->InAt(2).IsRegister());
    Register constant_area = locations->InAt(2).AsRegister<Register>();
    if (is_double) {
      __ movsd(out, codegen->LiteralInt64Address(kDoubleNaN, constant_area));
    } else {
      __ movss(out, codegen->LiteralInt32Address(kFloatNaN, constant_area));
    }
  } else {
    if (is_double) {
      __ pushl(Immediate(kDoubleNaNHigh));
      __ pushl(Immediate(kDoubleNaNLow));
      __ movsd(out, Address(ESP, 0));
      __ addl(ESP, Immediate(8));
    } else {
      __ pushl(Immediate(kFloatNaN));
      __ movss(out, Address(ESP, 0));
      __ addl(ESP, Immediate(4));
    }
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}
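
// Why orpd/andpd on the "equal" path (illustrative): when the operands
// compare equal the only interesting case is +0.0 vs -0.0, which differ only
// in the sign bit. OR-ing the raw bits makes min yield -0.0 whenever either
// input is -0.0; AND-ing makes max yield -0.0 only when both are, matching
// Math.min/Math.max semantics.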

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              /* is_double */ true,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              /* is_double */ false,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              /* is_double */ true,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              /* is_double */ false,
              GetAssembler(),
              codegen_);
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    //       a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_low.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temp and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    //  (out := op1)
    //  out <=? op2
    //  if out is min jmp done
    //  out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}
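
// Note on the long path (illustrative): x86-32 has no 64-bit compare, so the
// subl/sbbl pair computes op1 - op2 purely for its flags; after the sbbl the
// signed condition codes match a full 64-bit comparison, letting the two
// cmovl instructions select the winning register pair without any branch.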

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
    __ roundsd(out, in, Immediate(round_mode));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
    DCHECK(static_or_direct != nullptr);
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    if (static_or_direct->HasSpecialInput() &&
        invoke->InputAt(
            static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
      locations->SetInAt(1, Location::RequiresRegister());
    }
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {  // TODO: can we reach this?
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister t1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister t2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  NearLabel skip_incr, done;
  X86Assembler* assembler = GetAssembler();

  // Since no direct x86 rounding instruction matches the required semantics,
  // this intrinsic is implemented as follows:
  //  result = floor(in);
  //  if (in - result >= 0.5f)
  //    result = result + 1.0f;
  __ movss(t2, in);
  __ roundss(t1, in, Immediate(1));
  __ subss(t2, t1);
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    // Direct constant area available.
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    __ comiss(t2, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(0.5f), constant_area));
    __ j(kBelow, &skip_incr);
    __ addss(t1, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(1.0f), constant_area));
    __ Bind(&skip_incr);
  } else {
    // No constant area: go through stack.
    __ pushl(Immediate(bit_cast<int32_t, float>(0.5f)));
    __ pushl(Immediate(bit_cast<int32_t, float>(1.0f)));
    __ comiss(t2, Address(ESP, 4));
    __ j(kBelow, &skip_incr);
    __ addss(t1, Address(ESP, 0));
    __ Bind(&skip_incr);
    __ addl(ESP, Immediate(8));
  }

  // Final conversion to an integer. Unfortunately this also does not have a
  // direct x86 instruction, since NaN should map to 0 and large positive
  // values need to be clipped to the extreme value.
  __ movl(out, Immediate(kPrimIntMax));
  __ cvtsi2ss(t2, out);
  __ comiss(t1, t2);
  __ j(kAboveEqual, &done);  // clipped to max (already in out), does not jump on unordered
  __ movl(out, Immediate(0));  // does not change flags
  __ j(kUnordered, &done);  // NaN mapped to 0 (just moved in out)
  __ cvttss2si(out, t1);
  __ Bind(&done);
}
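
// End-to-end semantics of the sequence above (illustrative, per Math.round):
//   round(2.3f)  ==  2   // 2.3 - floor(2.3) = 0.3 < 0.5
//   round(2.5f)  ==  3   // ties round towards positive infinity
//   round(-2.5f) == -2   // floor(-2.5) = -3, then +1 since the diff is 0.5
// NaN maps to 0 and values >= 2^31 clamp to Integer.MAX_VALUE.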

static void CreateFPToFPCallLocations(ArenaAllocator* arena,
                                      HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->WillCall());
  DCHECK(invoke->IsInvokeStaticOrDirect());
  X86Assembler* assembler = codegen->GetAssembler();

  // We need some place to pass the parameters.
  __ subl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(16);

  // Pass the parameters at the bottom of the stack.
  __ movsd(Address(ESP, 0), XMM0);

  // If we have a second parameter, pass it next.
  if (invoke->GetNumberOfArguments() == 2) {
    __ movsd(Address(ESP, 8), XMM1);
  }

  // Now do the actual call.
  __ fs()->call(Address::Absolute(GetThreadOffset<kX86PointerSize>(entry)));

  // Extract the return value from the FP stack.
  __ fstpl(Address(ESP, 0));
  __ movsd(XMM0, Address(ESP, 0));

  // And clean up the stack.
  __ addl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(-16);

  codegen->RecordPcInfo(invoke, invoke->GetDexPc());
}
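
// Stack picture during the call above (an illustrative sketch): these quick
// math entrypoints follow the native x86-32 convention, taking doubles on
// the stack and returning in st(0), so the 16 reserved bytes hold
//   [ESP + 0]: first argument (copied from XMM0)
//   [ESP + 8]: optional second argument (copied from XMM1)
// and fstpl pops the x87 result back through the same scratch slot into XMM0.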

void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena,
                                        HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyways.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        if (length.IsConstant()) {
          __ cmpl(Address(input, length_offset),
                  Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmpl(Address(input, length_offset), length.AsRegister<Register>());
        }
        __ j(kLess, slow_path->GetEntryLabel());
      }
    } else {
      // Check that length(input) >= pos.
      __ movl(temp, Address(input, length_offset));
      __ subl(temp, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmpl(temp, length.AsRegister<Register>());
      }
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    if (length.IsConstant()) {
      __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(temp, length.AsRegister<Register>());
    }
    __ j(kLess, slow_path->GetEntryLabel());
  }
}
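
// Taken together, the branches above implement the usual arraycopy range
// check (illustrative pseudo-Java):
//   if (pos < 0 || pos > input.length || input.length - pos < length) {
//     goto slow_path;
//   }
// with the constant-pos and length-is-input-length cases folded in.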

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source. Use src_base as a temporary register.
  CheckPosition(assembler, srcPos, src, Location::RegisterLocation(count), slow_path, src_base);

  // Validity checks: dest. Use src_base as a temporary register.
  CheckPosition(assembler, destPos, dest, Location::RegisterLocation(count), slow_path, src_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();

    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
  __ rep_movsw();

  __ Bind(slow_path->GetExitLabel());
}
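
// The copy relies on the x86 string-op convention (illustrative): with the
// direction flag clear, rep_movsw copies ECX 16-bit chars from [ESI] to
// [EDI], advancing both pointers as it goes, which is why the temporaries
// were pinned to ESI/EDI/ECX in the LocationsBuilder.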
1321
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001322void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
1323 // The inputs plus one temp.
1324 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001325 LocationSummary::kCallOnMainAndSlowPath,
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001326 kIntrinsified);
1327 InvokeRuntimeCallingConvention calling_convention;
1328 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1329 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1330 locations->SetOut(Location::RegisterLocation(EAX));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001331}
1332
1333void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
1334 X86Assembler* assembler = GetAssembler();
1335 LocationSummary* locations = invoke->GetLocations();
1336
Nicolas Geoffray512e04d2015-03-27 17:21:24 +00001337 // Note that the null check must have been done earlier.
Calin Juravle641547a2015-04-21 22:08:51 +01001338 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001339
1340 Register argument = locations->InAt(1).AsRegister<Register>();
1341 __ testl(argument, argument);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001342 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001343 codegen_->AddSlowPath(slow_path);
1344 __ j(kEqual, slow_path->GetEntryLabel());
1345
Andreas Gampe542451c2016-07-26 09:02:02 -07001346 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pStringCompareTo)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001347 __ Bind(slow_path->GetExitLabel());
1348}
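// Annotation (not from the original source): no inline comparison loop is
// emitted for compareTo. A null argument branches to the intrinsic slow path,
// which in effect falls back to the regular String.compareTo call; the
// non-null case is delegated to the pStringCompareTo quick entrypoint reached
// through the thread's FS-based function table.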
1349
Agi Csakid7138c82015-08-13 17:46:44 -07001350void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
1351 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1352 LocationSummary::kNoCall,
1353 kIntrinsified);
1354 locations->SetInAt(0, Location::RequiresRegister());
1355 locations->SetInAt(1, Location::RequiresRegister());
1356
1357 // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
1358 locations->AddTemp(Location::RegisterLocation(ECX));
1359 locations->AddTemp(Location::RegisterLocation(EDI));
1360
1361 // Set output, ESI needed for repe_cmpsl instruction anyways.
1362 locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
1363}
1364
1365void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
1366 X86Assembler* assembler = GetAssembler();
1367 LocationSummary* locations = invoke->GetLocations();
1368
1369 Register str = locations->InAt(0).AsRegister<Register>();
1370 Register arg = locations->InAt(1).AsRegister<Register>();
1371 Register ecx = locations->GetTemp(0).AsRegister<Register>();
1372 Register edi = locations->GetTemp(1).AsRegister<Register>();
1373 Register esi = locations->Out().AsRegister<Register>();
1374
Mark Mendell0c9497d2015-08-21 09:30:05 -04001375 NearLabel end, return_true, return_false;
Agi Csakid7138c82015-08-13 17:46:44 -07001376
1377 // Get offsets of count, value, and class fields within a string object.
1378 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1379 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1380 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1381
1382 // Note that the null check must have been done earlier.
1383 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1384
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001385 StringEqualsOptimizations optimizations(invoke);
1386 if (!optimizations.GetArgumentNotNull()) {
1387 // Check if input is null, return false if it is.
1388 __ testl(arg, arg);
1389 __ j(kEqual, &return_false);
1390 }
Agi Csakid7138c82015-08-13 17:46:44 -07001391
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001392 if (!optimizations.GetArgumentIsString()) {
Vladimir Marko53b52002016-05-24 19:30:45 +01001393 // Instanceof check for the argument by comparing class fields.
1394 // All string objects must have the same type since String cannot be subclassed.
1395 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1396 // If the argument is a string object, its class field must be equal to receiver's class field.
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001397 __ movl(ecx, Address(str, class_offset));
1398 __ cmpl(ecx, Address(arg, class_offset));
1399 __ j(kNotEqual, &return_false);
1400 }
Agi Csakid7138c82015-08-13 17:46:44 -07001401
1402 // Reference equality check, return true if same reference.
1403 __ cmpl(str, arg);
1404 __ j(kEqual, &return_true);
1405
1406 // Load length of receiver string.
1407 __ movl(ecx, Address(str, count_offset));
1408 // Check if lengths are equal, return false if they're not.
1409 __ cmpl(ecx, Address(arg, count_offset));
1410 __ j(kNotEqual, &return_false);
1411 // Return true if both strings are empty.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001412 __ jecxz(&return_true);
Agi Csakid7138c82015-08-13 17:46:44 -07001413
1414 // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
1415 __ leal(esi, Address(str, value_offset));
1416 __ leal(edi, Address(arg, value_offset));
1417
1418 // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
1419 __ addl(ecx, Immediate(1));
1420 __ shrl(ecx, Immediate(1));
1421
1422 // Assertions that must hold in order to compare strings 2 characters at a time.
1423 DCHECK_ALIGNED(value_offset, 4);
1424 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
1425
1426 // Loop to compare strings two characters at a time starting at the beginning of the string.
1427 __ repe_cmpsl();
1428 // If strings are not equal, zero flag will be cleared.
1429 __ j(kNotEqual, &return_false);
1430
1431 // Return true and exit the function.
1432 // If loop does not result in returning false, we return true.
1433 __ Bind(&return_true);
1434 __ movl(esi, Immediate(1));
1435 __ jmp(&end);
1436
1437 // Return false and exit the function.
1438 __ Bind(&return_false);
1439 __ xorl(esi, esi);
1440 __ Bind(&end);
1441}
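// Annotation (not from the original source): the sequence above is roughly
// the following pseudocode, with the final comparison done 32 bits (two
// chars) at a time via repe cmpsl:
//
//   if (arg == null) return false;             // unless proven non-null
//   if (str.class != arg.class) return false;  // unless proven a String
//   if (str == arg) return true;
//   if (str.count != arg.count) return false;
//   if (str.count == 0) return true;
//   ESI = &str.value[0]; EDI = &arg.value[0];
//   ECX = (count + 1) / 2;                     // round up for odd lengths
//   repe cmpsl; return ZF;                     // ZF set iff all words equal
//
// The odd-length round-up is safe because object alignment guarantees the
// trailing half-word is zero padded (see the static_assert above).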
1442
Andreas Gampe21030dd2015-05-07 14:46:15 -07001443static void CreateStringIndexOfLocations(HInvoke* invoke,
1444 ArenaAllocator* allocator,
1445 bool start_at_zero) {
1446 LocationSummary* locations = new (allocator) LocationSummary(invoke,
1447 LocationSummary::kCallOnSlowPath,
1448 kIntrinsified);
1449 // The data needs to be in EDI for scasw. So request that the string is there, anyways.
1450 locations->SetInAt(0, Location::RegisterLocation(EDI));
1451 // If we look for a constant char, we'll still have to copy it into EAX. So just request the
1452 // allocator to do that, anyways. We can still do the constant check by checking the parameter
1453 // of the instruction explicitly.
1454 // Note: This works as we don't clobber EAX anywhere.
1455 locations->SetInAt(1, Location::RegisterLocation(EAX));
1456 if (!start_at_zero) {
1457 locations->SetInAt(2, Location::RequiresRegister()); // The starting index.
1458 }
1459 // As we clobber EDI during execution anyways, also use it as the output.
1460 locations->SetOut(Location::SameAsFirstInput());
1461
1462 // repne scasw uses ECX as the counter.
1463 locations->AddTemp(Location::RegisterLocation(ECX));
1464 // Need another temporary to be able to compute the result.
1465 locations->AddTemp(Location::RequiresRegister());
1466}
1467
1468static void GenerateStringIndexOf(HInvoke* invoke,
1469 X86Assembler* assembler,
1470 CodeGeneratorX86* codegen,
1471 ArenaAllocator* allocator,
1472 bool start_at_zero) {
1473 LocationSummary* locations = invoke->GetLocations();
1474
1475 // Note that the null check must have been done earlier.
1476 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1477
1478 Register string_obj = locations->InAt(0).AsRegister<Register>();
1479 Register search_value = locations->InAt(1).AsRegister<Register>();
1480 Register counter = locations->GetTemp(0).AsRegister<Register>();
1481 Register string_length = locations->GetTemp(1).AsRegister<Register>();
1482 Register out = locations->Out().AsRegister<Register>();
1483
1484 // Check our assumptions for registers.
1485 DCHECK_EQ(string_obj, EDI);
1486 DCHECK_EQ(search_value, EAX);
1487 DCHECK_EQ(counter, ECX);
1488 DCHECK_EQ(out, EDI);
1489
1490 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001491 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Andreas Gampe85b62f22015-09-09 13:15:38 -07001492 SlowPathCode* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001493 HInstruction* code_point = invoke->InputAt(1);
1494 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001495 if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
Andreas Gampe21030dd2015-05-07 14:46:15 -07001496 std::numeric_limits<uint16_t>::max()) {
1497 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1498 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1499 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1500 codegen->AddSlowPath(slow_path);
1501 __ jmp(slow_path->GetEntryLabel());
1502 __ Bind(slow_path->GetExitLabel());
1503 return;
1504 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001505 } else if (code_point->GetType() != Primitive::kPrimChar) {
Andreas Gampe21030dd2015-05-07 14:46:15 -07001506 __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
1507 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1508 codegen->AddSlowPath(slow_path);
1509 __ j(kAbove, slow_path->GetEntryLabel());
1510 }
1511
1512 // From here down, we know that we are looking for a char that fits in 16 bits.
1513 // Location of reference to data array within the String object.
1514 int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1515 // Location of count within the String object.
1516 int32_t count_offset = mirror::String::CountOffset().Int32Value();
1517
1518 // Load string length, i.e., the count field of the string.
1519 __ movl(string_length, Address(string_obj, count_offset));
1520
1521 // Do a zero-length check.
1522 // TODO: Support jecxz.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001523 NearLabel not_found_label;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001524 __ testl(string_length, string_length);
1525 __ j(kEqual, &not_found_label);
1526
1527 if (start_at_zero) {
1528 // Number of chars to scan is the same as the string length.
1529 __ movl(counter, string_length);
1530
1531 // Move to the start of the string.
1532 __ addl(string_obj, Immediate(value_offset));
1533 } else {
1534 Register start_index = locations->InAt(2).AsRegister<Register>();
1535
1536 // Do a start_index check.
1537 __ cmpl(start_index, string_length);
1538 __ j(kGreaterEqual, &not_found_label);
1539
1540 // Ensure the start index is >= 0: clamp a negative value to zero via cmov.
1541 __ xorl(counter, counter);
1542 __ cmpl(start_index, Immediate(0));
1543 __ cmovl(kGreater, counter, start_index);
1544
1545 // Move to the start of the string: string_obj + value_offset + 2 * start_index.
1546 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1547
1548 // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
1549 // compare.
1550 __ negl(counter);
1551 __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
1552 }
1553
1554 // Everything is set up for repne scasw:
1555 // * Comparison address in EDI.
1556 // * Counter in ECX.
1557 __ repne_scasw();
1558
1559 // Did we find a match?
1560 __ j(kNotEqual, &not_found_label);
1561
1562 // Yes, we matched. Compute the index of the result.
1563 __ subl(string_length, counter);
1564 __ leal(out, Address(string_length, -1));
1565
Mark Mendell0c9497d2015-08-21 09:30:05 -04001566 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001567 __ jmp(&done);
1568
1569 // Failed to match; return -1.
1570 __ Bind(&not_found_label);
1571 __ movl(out, Immediate(-1));
1572
1573 // And join up at the end.
1574 __ Bind(&done);
1575 if (slow_path != nullptr) {
1576 __ Bind(slow_path->GetExitLabel());
1577 }
1578}
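// Annotation (not from the original source): the index arithmetic after
// repne scasw works as follows. scasw decrements ECX once per compared char,
// including the matching one, so on a hit at absolute index i (with the scan
// covering length - start chars):
//
//   ECX            = (length - start) - (i - start + 1) = length - i - 1
//   string_length -= ECX                // subl: yields i + 1
//   out            = string_length - 1  // leal: yields i
//
// On a miss, ECX reaches zero with ZF clear, and the kNotEqual branch above
// falls through to return -1 instead.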
1579
1580void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001581 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001582}
1583
1584void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001585 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001586}
1587
1588void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001589 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001590}
1591
1592void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001593 GenerateStringIndexOf(
1594 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001595}
1596
Jeff Hao848f70a2014-01-15 13:49:50 -08001597void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1598 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001599 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001600 kIntrinsified);
1601 InvokeRuntimeCallingConvention calling_convention;
1602 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1603 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1604 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1605 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1606 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001607}
1608
1609void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1610 X86Assembler* assembler = GetAssembler();
1611 LocationSummary* locations = invoke->GetLocations();
1612
1613 Register byte_array = locations->InAt(0).AsRegister<Register>();
1614 __ testl(byte_array, byte_array);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001615 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001616 codegen_->AddSlowPath(slow_path);
1617 __ j(kEqual, slow_path->GetEntryLabel());
1618
Andreas Gampe542451c2016-07-26 09:02:02 -07001619 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pAllocStringFromBytes)));
Roland Levillainf969a202016-03-09 16:14:00 +00001620 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001621 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1622 __ Bind(slow_path->GetExitLabel());
1623}
1624
1625void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1626 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001627 LocationSummary::kCallOnMainOnly,
Jeff Hao848f70a2014-01-15 13:49:50 -08001628 kIntrinsified);
1629 InvokeRuntimeCallingConvention calling_convention;
1630 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1631 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1632 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1633 locations->SetOut(Location::RegisterLocation(EAX));
1634}
1635
1636void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1637 X86Assembler* assembler = GetAssembler();
1638
Roland Levillaincc3839c2016-02-29 16:23:48 +00001639 // No need to emit code checking whether `locations->InAt(2)` is a null
1640 // pointer, as callers of the native method
1641 //
1642 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1643 //
1644 // all include a null check on `data` before calling that method.
Andreas Gampe542451c2016-07-26 09:02:02 -07001645 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pAllocStringFromChars)));
Roland Levillainf969a202016-03-09 16:14:00 +00001646 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001647 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1648}
1649
1650void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
1651 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001652 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001653 kIntrinsified);
1654 InvokeRuntimeCallingConvention calling_convention;
1655 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1656 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001657}
1658
1659void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
1660 X86Assembler* assembler = GetAssembler();
1661 LocationSummary* locations = invoke->GetLocations();
1662
1663 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1664 __ testl(string_to_copy, string_to_copy);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001665 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001666 codegen_->AddSlowPath(slow_path);
1667 __ j(kEqual, slow_path->GetEntryLabel());
1668
Andreas Gampe542451c2016-07-26 09:02:02 -07001669 __ fs()->call(
1670 Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pAllocStringFromString)));
Roland Levillainf969a202016-03-09 16:14:00 +00001671 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001672 codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1673 __ Bind(slow_path->GetExitLabel());
1674}
1675
Mark Mendell8f8926a2015-08-17 11:39:06 -04001676void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1677 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1678 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1679 LocationSummary::kNoCall,
1680 kIntrinsified);
1681 locations->SetInAt(0, Location::RequiresRegister());
1682 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1683 // Place srcEnd in ECX to save a move below.
1684 locations->SetInAt(2, Location::RegisterLocation(ECX));
1685 locations->SetInAt(3, Location::RequiresRegister());
1686 locations->SetInAt(4, Location::RequiresRegister());
1687
1688 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1689 // We don't have enough registers to also grab ECX, so handle below.
1690 locations->AddTemp(Location::RegisterLocation(ESI));
1691 locations->AddTemp(Location::RegisterLocation(EDI));
1692}
1693
1694void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1695 X86Assembler* assembler = GetAssembler();
1696 LocationSummary* locations = invoke->GetLocations();
1697
1698 size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
1699 // Location of data in char array buffer.
1700 const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
1701 // Location of char array data in string.
1702 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1703
1704 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1705 Register obj = locations->InAt(0).AsRegister<Register>();
1706 Location srcBegin = locations->InAt(1);
1707 int srcBegin_value =
1708 srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
1709 Register srcEnd = locations->InAt(2).AsRegister<Register>();
1710 Register dst = locations->InAt(3).AsRegister<Register>();
1711 Register dstBegin = locations->InAt(4).AsRegister<Register>();
1712
1713 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1714 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1715 DCHECK_EQ(char_size, 2u);
1716
1717 // Compute the address of the destination buffer.
1718 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1719
1720 // Compute the address of the source string.
1721 if (srcBegin.IsConstant()) {
1722 // Compute the address of the source string by adding the number of chars from
1723 // the source beginning to the value offset of a string.
1724 __ leal(ESI, Address(obj, srcBegin_value * char_size + value_offset));
1725 } else {
1726 __ leal(ESI, Address(obj, srcBegin.AsRegister<Register>(),
1727 ScaleFactor::TIMES_2, value_offset));
1728 }
1729
1730 // Compute the number of chars (words) to move.
1731 // Now is the time to save ECX, since we don't know if it will be used later.
1732 __ pushl(ECX);
1733 int stack_adjust = kX86WordSize;
1734 __ cfi().AdjustCFAOffset(stack_adjust);
1735 DCHECK_EQ(srcEnd, ECX);
1736 if (srcBegin.IsConstant()) {
1737 if (srcBegin_value != 0) {
1738 __ subl(ECX, Immediate(srcBegin_value));
1739 }
1740 } else {
1741 DCHECK(srcBegin.IsRegister());
1742 __ subl(ECX, srcBegin.AsRegister<Register>());
1743 }
1744
1745 // Do the move.
1746 __ rep_movsw();
1747
1748 // And restore ECX.
1749 __ popl(ECX);
1750 __ cfi().AdjustCFAOffset(-stack_adjust);
1751}
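// Annotation (not from the original source): rep movsw uses ECX as its word
// counter, but ECX also holds the srcEnd input, whose value the register
// allocator may expect to survive the intrinsic. As the comment above notes,
// there are no free fixed temps left for it, so the value is saved and
// restored around the copy with push/pop, keeping the CFI offset in sync.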
1752
Mark Mendell09ed1a32015-03-25 08:30:06 -04001753static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1754 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1755 Location out_loc = locations->Out();
1756 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1757 // to avoid a SIGBUS.
1758 switch (size) {
1759 case Primitive::kPrimByte:
1760 __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
1761 break;
1762 case Primitive::kPrimShort:
1763 __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
1764 break;
1765 case Primitive::kPrimInt:
1766 __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
1767 break;
1768 case Primitive::kPrimLong:
1769 __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
1770 __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
1771 break;
1772 default:
1773 LOG(FATAL) << "Type not recognized for peek: " << size;
1774 UNREACHABLE();
1775 }
1776}
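// Annotation (not from the original source): the kPrimLong peek above is two
// separate 32-bit loads, so a concurrent writer could be observed torn. The
// assumption of this note (not stated in this file) is that the
// Memory.peekLongNative contract is a plain raw read with no atomicity
// promise, so the split load is acceptable.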
1777
1778void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
1779 CreateLongToIntLocations(arena_, invoke);
1780}
1781
1782void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
1783 GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1784}
1785
1786void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1787 CreateLongToIntLocations(arena_, invoke);
1788}
1789
1790void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1791 GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1792}
1793
1794void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1795 CreateLongToLongLocations(arena_, invoke);
1796}
1797
1798void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1799 GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1800}
1801
1802void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1803 CreateLongToIntLocations(arena_, invoke);
1804}
1805
1806void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1807 GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1808}
1809
1810static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
1811 HInvoke* invoke) {
1812 LocationSummary* locations = new (arena) LocationSummary(invoke,
1813 LocationSummary::kNoCall,
1814 kIntrinsified);
1815 locations->SetInAt(0, Location::RequiresRegister());
Roland Levillain4c0eb422015-04-24 16:43:49 +01001816 HInstruction* value = invoke->InputAt(1);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001817 if (size == Primitive::kPrimByte) {
1818 locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
1819 } else {
1820 locations->SetInAt(1, Location::RegisterOrConstant(value));
1821 }
1822}
1823
1824static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1825 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1826 Location value_loc = locations->InAt(1);
1827 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1828 // to avoid a SIGBUS.
1829 switch (size) {
1830 case Primitive::kPrimByte:
1831 if (value_loc.IsConstant()) {
1832 __ movb(Address(address, 0),
1833 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1834 } else {
1835 __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
1836 }
1837 break;
1838 case Primitive::kPrimShort:
1839 if (value_loc.IsConstant()) {
1840 __ movw(Address(address, 0),
1841 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1842 } else {
1843 __ movw(Address(address, 0), value_loc.AsRegister<Register>());
1844 }
1845 break;
1846 case Primitive::kPrimInt:
1847 if (value_loc.IsConstant()) {
1848 __ movl(Address(address, 0),
1849 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1850 } else {
1851 __ movl(Address(address, 0), value_loc.AsRegister<Register>());
1852 }
1853 break;
1854 case Primitive::kPrimLong:
1855 if (value_loc.IsConstant()) {
1856 int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
1857 __ movl(Address(address, 0), Immediate(Low32Bits(value)));
1858 __ movl(Address(address, 4), Immediate(High32Bits(value)));
1859 } else {
1860 __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
1861 __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
1862 }
1863 break;
1864 default:
1865 LOG(FATAL) << "Type not recognized for poke: " << size;
1866 UNREACHABLE();
1867 }
1868}
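// Annotation (not from the original source): as with the peek above, a long
// poke is two plain 32-bit stores (constants are split with Low32Bits and
// High32Bits), so the write is not atomic with respect to concurrent readers;
// unaligned addresses are fine on x86, as the comment in GenPoke notes.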
1869
1870void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
1871 CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
1872}
1873
1874void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
1875 GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1876}
1877
1878void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1879 CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
1880}
1881
1882void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1883 GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1884}
1885
1886void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1887 CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
1888}
1889
1890void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1891 GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1892}
1893
1894void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1895 CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
1896}
1897
1898void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1899 GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1900}
1901
1902void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
1903 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1904 LocationSummary::kNoCall,
1905 kIntrinsified);
1906 locations->SetOut(Location::RequiresRegister());
1907}
1908
1909void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
1910 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07001911 GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86PointerSize>()));
Mark Mendell09ed1a32015-03-25 08:30:06 -04001912}
1913
Roland Levillain0d5a2812015-11-13 10:07:31 +00001914static void GenUnsafeGet(HInvoke* invoke,
1915 Primitive::Type type,
1916 bool is_volatile,
1917 CodeGeneratorX86* codegen) {
1918 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
1919 LocationSummary* locations = invoke->GetLocations();
1920 Location base_loc = locations->InAt(1);
1921 Register base = base_loc.AsRegister<Register>();
1922 Location offset_loc = locations->InAt(2);
1923 Register offset = offset_loc.AsRegisterPairLow<Register>();
1924 Location output_loc = locations->Out();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001925
1926 switch (type) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00001927 case Primitive::kPrimInt: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001928 Register output = output_loc.AsRegister<Register>();
1929 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
Roland Levillain7c1559a2015-12-15 10:55:36 +00001930 break;
1931 }
1932
1933 case Primitive::kPrimNot: {
1934 Register output = output_loc.AsRegister<Register>();
1935 if (kEmitCompilerReadBarrier) {
1936 if (kUseBakerReadBarrier) {
Sang, Chunlei0fcd2b82016-04-05 17:12:59 +08001937 Address src(base, offset, ScaleFactor::TIMES_1, 0);
1938 codegen->GenerateReferenceLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00001939 invoke, output_loc, base, src, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00001940 } else {
1941 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1942 codegen->GenerateReadBarrierSlow(
1943 invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
1944 }
1945 } else {
1946 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1947 __ MaybeUnpoisonHeapReference(output);
Roland Levillain4d027112015-07-01 15:41:14 +01001948 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001949 break;
Roland Levillain4d027112015-07-01 15:41:14 +01001950 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001951
1952 case Primitive::kPrimLong: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001953 Register output_lo = output_loc.AsRegisterPairLow<Register>();
1954 Register output_hi = output_loc.AsRegisterPairHigh<Register>();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001955 if (is_volatile) {
1956 // Need to use a XMM to read atomically.
1957 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1958 __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
1959 __ movd(output_lo, temp);
1960 __ psrlq(temp, Immediate(32));
1961 __ movd(output_hi, temp);
1962 } else {
1963 __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
1964 __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
1965 }
1966 }
1967 break;
1968
1969 default:
1970 LOG(FATAL) << "Unsupported op size " << type;
1971 UNREACHABLE();
1972 }
1973}
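// Annotation (not from the original source): the volatile long read above
// relies on movsd performing a single 8-byte load, which is atomic for the
// naturally aligned fields assumed here on SSE-capable x86. The XMM value is
// then split into the output register pair: movd extracts the low word, and
// psrlq by 32 followed by movd extracts the high word.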
1974
Roland Levillain7c1559a2015-12-15 10:55:36 +00001975static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
1976 HInvoke* invoke,
1977 Primitive::Type type,
1978 bool is_volatile) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001979 bool can_call = kEmitCompilerReadBarrier &&
1980 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
1981 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001982 LocationSummary* locations = new (arena) LocationSummary(invoke,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001983 can_call ?
1984 LocationSummary::kCallOnSlowPath :
1985 LocationSummary::kNoCall,
Mark Mendell09ed1a32015-03-25 08:30:06 -04001986 kIntrinsified);
Vladimir Marko70e97462016-08-09 11:04:26 +01001987 if (can_call && kUseBakerReadBarrier) {
1988 locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers.
1989 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001990 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1991 locations->SetInAt(1, Location::RequiresRegister());
1992 locations->SetInAt(2, Location::RequiresRegister());
Roland Levillain7c1559a2015-12-15 10:55:36 +00001993 if (type == Primitive::kPrimLong) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001994 if (is_volatile) {
1995 // Need to use XMM to read volatile.
1996 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain3d312422016-06-23 13:53:42 +01001997 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001998 } else {
1999 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
2000 }
2001 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01002002 locations->SetOut(Location::RequiresRegister(),
2003 can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002004 }
2005}
2006
2007void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002008 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002009}
2010void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002011 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002012}
2013void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002014 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002015}
2016void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002017 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002018}
2019void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002020 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002021}
2022void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002023 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002024}
2025
2026
2027void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002028 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002029}
2030void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002031 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002032}
2033void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002034 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002035}
2036void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002037 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002038}
2039void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002040 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002041}
2042void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002043 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002044}
2045
2046
2047static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
2048 Primitive::Type type,
2049 HInvoke* invoke,
2050 bool is_volatile) {
2051 LocationSummary* locations = new (arena) LocationSummary(invoke,
2052 LocationSummary::kNoCall,
2053 kIntrinsified);
2054 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2055 locations->SetInAt(1, Location::RequiresRegister());
2056 locations->SetInAt(2, Location::RequiresRegister());
2057 locations->SetInAt(3, Location::RequiresRegister());
2058 if (type == Primitive::kPrimNot) {
2059 // Need temp registers for card-marking.
Roland Levillain4d027112015-07-01 15:41:14 +01002060 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell09ed1a32015-03-25 08:30:06 -04002061 // Ensure the value is in a byte register.
2062 locations->AddTemp(Location::RegisterLocation(ECX));
2063 } else if (type == Primitive::kPrimLong && is_volatile) {
2064 locations->AddTemp(Location::RequiresFpuRegister());
2065 locations->AddTemp(Location::RequiresFpuRegister());
2066 }
2067}
2068
2069void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002070 CreateIntIntIntIntToVoidPlusTempsLocations(
2071 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002072}
2073void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002074 CreateIntIntIntIntToVoidPlusTempsLocations(
2075 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002076}
2077void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002078 CreateIntIntIntIntToVoidPlusTempsLocations(
2079 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002080}
2081void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002082 CreateIntIntIntIntToVoidPlusTempsLocations(
2083 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002084}
2085void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002086 CreateIntIntIntIntToVoidPlusTempsLocations(
2087 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002088}
2089void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002090 CreateIntIntIntIntToVoidPlusTempsLocations(
2091 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002092}
2093void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002094 CreateIntIntIntIntToVoidPlusTempsLocations(
2095 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002096}
2097void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002098 CreateIntIntIntIntToVoidPlusTempsLocations(
2099 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002100}
2101void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002102 CreateIntIntIntIntToVoidPlusTempsLocations(
2103 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002104}
2105
2106// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
2107// memory model.
2108static void GenUnsafePut(LocationSummary* locations,
2109 Primitive::Type type,
2110 bool is_volatile,
2111 CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002112 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04002113 Register base = locations->InAt(1).AsRegister<Register>();
2114 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2115 Location value_loc = locations->InAt(3);
2116
2117 if (type == Primitive::kPrimLong) {
2118 Register value_lo = value_loc.AsRegisterPairLow<Register>();
2119 Register value_hi = value_loc.AsRegisterPairHigh<Register>();
2120 if (is_volatile) {
2121 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2122 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
2123 __ movd(temp1, value_lo);
2124 __ movd(temp2, value_hi);
2125 __ punpckldq(temp1, temp2);
2126 __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
2127 } else {
2128 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
2129 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
2130 }
Roland Levillain4d027112015-07-01 15:41:14 +01002131 } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
2132 Register temp = locations->GetTemp(0).AsRegister<Register>();
2133 __ movl(temp, value_loc.AsRegister<Register>());
2134 __ PoisonHeapReference(temp);
2135 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002136 } else {
2137 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
2138 }
2139
2140 if (is_volatile) {
Mark P Mendell17077d82015-12-16 19:15:59 +00002141 codegen->MemoryFence();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002142 }
2143
2144 if (type == Primitive::kPrimNot) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002145 bool value_can_be_null = true; // TODO: Worth finding out this information?
Mark Mendell09ed1a32015-03-25 08:30:06 -04002146 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2147 locations->GetTemp(1).AsRegister<Register>(),
2148 base,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002149 value_loc.AsRegister<Register>(),
2150 value_can_be_null);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002151 }
2152}
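// Annotation (not from the original source): under x86's TSO memory model,
// stores are not reordered with earlier stores, so a volatile put only needs
// a StoreLoad barrier after the store itself; that is what
// codegen->MemoryFence() supplies (implemented elsewhere in this codegen,
// typically as an mfence or an equivalent locked read-modify-write no-op).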
2153
2154void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002155 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002156}
2157void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002158 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002159}
2160void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002161 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002162}
2163void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002164 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002165}
2166void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002167 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002168}
2169void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002170 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002171}
2172void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002173 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002174}
2175void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002176 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002177}
2178void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002179 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002180}
2181
Mark Mendell58d25fd2015-04-03 14:52:31 -04002182static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
2183 HInvoke* invoke) {
2184 LocationSummary* locations = new (arena) LocationSummary(invoke,
2185 LocationSummary::kNoCall,
2186 kIntrinsified);
2187 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2188 locations->SetInAt(1, Location::RequiresRegister());
2189 // Offset is a long, but in 32-bit mode, we only need the low word.
2190 // Can we update the invoke here to remove a TypeConvert to Long?
2191 locations->SetInAt(2, Location::RequiresRegister());
2192 // Expected value must be in EAX or EDX:EAX.
2193 // For long, new value must be in ECX:EBX.
2194 if (type == Primitive::kPrimLong) {
2195 locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
2196 locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
2197 } else {
2198 locations->SetInAt(3, Location::RegisterLocation(EAX));
2199 locations->SetInAt(4, Location::RequiresRegister());
2200 }
2201
2202 // Force a byte register for the output.
2203 locations->SetOut(Location::RegisterLocation(EAX));
2204 if (type == Primitive::kPrimNot) {
2205 // Need temp registers for card-marking.
Roland Levillainb488b782015-10-22 11:38:49 +01002206 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002207 // Need a byte register for marking.
2208 locations->AddTemp(Location::RegisterLocation(ECX));
2209 }
2210}
2211
2212void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
2213 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
2214}
2215
2216void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
2217 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
2218}
2219
2220void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain391b8662015-12-18 11:43:38 +00002221 // The UnsafeCASObject intrinsic is missing a read barrier, and
2222 // therefore sometimes does not work as expected (b/25883050).
2223 // Turn it off temporarily as a quick fix, until the read barrier is
Roland Levillain3d312422016-06-23 13:53:42 +01002224 // implemented (see TODO in GenCAS).
Roland Levillain391b8662015-12-18 11:43:38 +00002225 //
Roland Levillain3d312422016-06-23 13:53:42 +01002226 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
Roland Levillain391b8662015-12-18 11:43:38 +00002227 // this intrinsic.
2228 if (kEmitCompilerReadBarrier) {
2229 return;
2230 }
2231
Mark Mendell58d25fd2015-04-03 14:52:31 -04002232 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
2233}
2234
2235static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002236 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell58d25fd2015-04-03 14:52:31 -04002237 LocationSummary* locations = invoke->GetLocations();
2238
2239 Register base = locations->InAt(1).AsRegister<Register>();
2240 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2241 Location out = locations->Out();
2242 DCHECK_EQ(out.AsRegister<Register>(), EAX);
2243
Roland Levillainb488b782015-10-22 11:38:49 +01002244 if (type == Primitive::kPrimNot) {
Roland Levillain4d027112015-07-01 15:41:14 +01002245 Register expected = locations->InAt(3).AsRegister<Register>();
Roland Levillainb488b782015-10-22 11:38:49 +01002246 // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
Roland Levillain4d027112015-07-01 15:41:14 +01002247 DCHECK_EQ(expected, EAX);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002248 Register value = locations->InAt(4).AsRegister<Register>();
Roland Levillain4d027112015-07-01 15:41:14 +01002249
Roland Levillainb488b782015-10-22 11:38:49 +01002250 // Mark card for object assuming new value is stored.
2251 bool value_can_be_null = true; // TODO: Worth finding out this information?
2252 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2253 locations->GetTemp(1).AsRegister<Register>(),
2254 base,
2255 value,
2256 value_can_be_null);
2257
2258 bool base_equals_value = (base == value);
2259 if (kPoisonHeapReferences) {
2260 if (base_equals_value) {
2261 // If `base` and `value` are the same register location, move
2262 // `value` to a temporary register. This way, poisoning
2263 // `value` won't invalidate `base`.
2264 value = locations->GetTemp(0).AsRegister<Register>();
2265 __ movl(value, base);
Roland Levillain4d027112015-07-01 15:41:14 +01002266 }
Roland Levillainb488b782015-10-22 11:38:49 +01002267
2268 // Check that the register allocator did not assign the location
2269 // of `expected` (EAX) to `value` nor to `base`, so that heap
2270 // poisoning (when enabled) works as intended below.
2271 // - If `value` were equal to `expected`, both references would
2272 // be poisoned twice, meaning they would not be poisoned at
2273 // all, as heap poisoning uses address negation.
2274 // - If `base` were equal to `expected`, poisoning `expected`
2275 // would invalidate `base`.
2276 DCHECK_NE(value, expected);
2277 DCHECK_NE(base, expected);
2278
2279 __ PoisonHeapReference(expected);
2280 __ PoisonHeapReference(value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002281 }
2282
Roland Levillain391b8662015-12-18 11:43:38 +00002283 // TODO: Add a read barrier for the reference stored in the object
2284 // before attempting the CAS, similar to the one in the
2285 // art::Unsafe_compareAndSwapObject JNI implementation.
2286 //
2287 // Note that this code is not (yet) used when read barriers are
2288 // enabled (see IntrinsicLocationsBuilderX86::VisitUnsafeCASObject).
2289 DCHECK(!kEmitCompilerReadBarrier);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002290 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002291
Roland Levillain0d5a2812015-11-13 10:07:31 +00002292 // LOCK CMPXCHG has full barrier semantics, and we don't need
Roland Levillainb488b782015-10-22 11:38:49 +01002293 // scheduling barriers at this time.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002294
Roland Levillainb488b782015-10-22 11:38:49 +01002295 // Convert ZF into the boolean result.
2296 __ setb(kZero, out.AsRegister<Register>());
2297 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002298
Roland Levillain391b8662015-12-18 11:43:38 +00002299 // If heap poisoning is enabled, we need to unpoison the values
2300 // that were poisoned earlier.
Roland Levillainb488b782015-10-22 11:38:49 +01002301 if (kPoisonHeapReferences) {
2302 if (base_equals_value) {
2303 // `value` has been moved to a temporary register, no need to
2304 // unpoison it.
2305 } else {
2306 // Ensure `value` is different from `out`, so that unpoisoning
2307 // the former does not invalidate the latter.
2308 DCHECK_NE(value, out.AsRegister<Register>());
2309 __ UnpoisonHeapReference(value);
2310 }
2311 // Do not unpoison the reference contained in register
2312 // `expected`, as it is the same as register `out` (EAX).
2313 }
2314 } else {
2315 if (type == Primitive::kPrimInt) {
2316 // Ensure the expected value is in EAX (required by the CMPXCHG
2317 // instruction).
2318 DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
2319 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0),
2320 locations->InAt(4).AsRegister<Register>());
2321 } else if (type == Primitive::kPrimLong) {
2322 // Ensure the expected value is in EAX:EDX and that the new
2323 // value is in EBX:ECX (required by the CMPXCHG8B instruction).
2324 DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
2325 DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
2326 DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
2327 DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
2328 __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
2329 } else {
2330 LOG(FATAL) << "Unexpected CAS type " << type;
2331 }
2332
Roland Levillain0d5a2812015-11-13 10:07:31 +00002333 // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
2334 // don't need scheduling barriers at this time.
Roland Levillainb488b782015-10-22 11:38:49 +01002335
2336 // Convert ZF into the boolean result.
2337 __ setb(kZero, out.AsRegister<Register>());
2338 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002339 }
Mark Mendell58d25fd2015-04-03 14:52:31 -04002340}
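// Annotation (not from the original source): in both branches above, the
// boolean result is materialized from the flags. LOCK CMPXCHG / LOCK CMPXCHG8B
// set ZF on success, setb(kZero) writes that flag into the low byte of EAX,
// and movzxb clears the upper 24 bits so the intrinsic returns a clean 0/1.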
2341
2342void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
2343 GenCAS(Primitive::kPrimInt, invoke, codegen_);
2344}
2345
2346void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
2347 GenCAS(Primitive::kPrimLong, invoke, codegen_);
2348}
2349
2350void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain3d312422016-06-23 13:53:42 +01002351 // The UnsafeCASObject intrinsic is missing a read barrier, and
2352 // therefore sometimes does not work as expected (b/25883050).
2353 // Turn it off temporarily as a quick fix, until the read barrier is
2354 // implemented (see TODO in GenCAS).
2355 //
2356 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
2357 // this intrinsic.
2358 DCHECK(!kEmitCompilerReadBarrier);
2359
Mark Mendell58d25fd2015-04-03 14:52:31 -04002360 GenCAS(Primitive::kPrimNot, invoke, codegen_);
2361}
2362
2363void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
2364 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2365 LocationSummary::kNoCall,
2366 kIntrinsified);
2367 locations->SetInAt(0, Location::RequiresRegister());
2368 locations->SetOut(Location::SameAsFirstInput());
2369 locations->AddTemp(Location::RequiresRegister());
2370}
2371
2372static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
2373 X86Assembler* assembler) {
2374 Immediate imm_shift(shift);
2375 Immediate imm_mask(mask);
2376 __ movl(temp, reg);
2377 __ shrl(reg, imm_shift);
2378 __ andl(temp, imm_mask);
2379 __ andl(reg, imm_mask);
2380 __ shll(temp, imm_shift);
2381 __ orl(reg, temp);
2382}
2383
2384void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002385 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002386 LocationSummary* locations = invoke->GetLocations();
2387
2388 Register reg = locations->InAt(0).AsRegister<Register>();
2389 Register temp = locations->GetTemp(0).AsRegister<Register>();
2390
2391 /*
2392 * Use one bswap instruction to reverse byte order first and then use 3 rounds of
2393 * swapping bits to reverse bits in a number x. Using bswap to save instructions
2394 * compared to generic luni implementation which has 5 rounds of swapping bits.
2395 * x = bswap x
2396 * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
2397 * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
2398 * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
2399 */
2400 __ bswapl(reg);
2401 SwapBits(reg, temp, 1, 0x55555555, assembler);
2402 SwapBits(reg, temp, 2, 0x33333333, assembler);
2403 SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
2404}
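// Annotation (not from the original source): a worked example of the
// bswap-plus-three-rounds scheme on the input 0x00000001:
//
//   bswapl                        -> 0x01000000  (bit 24)
//   SwapBits(shift 1, 0x55555555) -> 0x02000000  (bit 25)
//   SwapBits(shift 2, 0x33333333) -> 0x08000000  (bit 27)
//   SwapBits(shift 4, 0x0f0f0f0f) -> 0x80000000  (bit 31)
//
// which is 0x00000001 bit-reversed, as expected.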
2405
2406void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
2407 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2408 LocationSummary::kNoCall,
2409 kIntrinsified);
2410 locations->SetInAt(0, Location::RequiresRegister());
2411 locations->SetOut(Location::SameAsFirstInput());
2412 locations->AddTemp(Location::RequiresRegister());
2413}
2414
2415void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002416 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002417 LocationSummary* locations = invoke->GetLocations();
2418
2419 Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
2420 Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2421 Register temp = locations->GetTemp(0).AsRegister<Register>();
2422
2423 // We want to swap high/low, then bswap each one, and then do the same
2424 // as a 32 bit reverse.
2425 // Exchange high and low.
2426 __ movl(temp, reg_low);
2427 __ movl(reg_low, reg_high);
2428 __ movl(reg_high, temp);
2429
2430 // bit-reverse low
2431 __ bswapl(reg_low);
2432 SwapBits(reg_low, temp, 1, 0x55555555, assembler);
2433 SwapBits(reg_low, temp, 2, 0x33333333, assembler);
2434 SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);
2435
2436 // bit-reverse high
2437 __ bswapl(reg_high);
2438 SwapBits(reg_high, temp, 1, 0x55555555, assembler);
2439 SwapBits(reg_high, temp, 2, 0x33333333, assembler);
2440 SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
2441}
2442
Aart Bikc39dac12016-01-21 08:59:48 -08002443static void CreateBitCountLocations(
2444 ArenaAllocator* arena, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) {
2445 if (!codegen->GetInstructionSetFeatures().HasPopCnt()) {
    // Do nothing if there is no popcnt support. This results in generating
    // a call for the intrinsic method rather than direct inline code.
2448 return;
2449 }
2450 LocationSummary* locations = new (arena) LocationSummary(invoke,
2451 LocationSummary::kNoCall,
2452 kIntrinsified);
2453 if (is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002454 locations->AddTemp(Location::RequiresRegister());
Aart Bikc39dac12016-01-21 08:59:48 -08002455 }
Aart Bik2a946072016-01-21 12:49:00 -08002456 locations->SetInAt(0, Location::Any());
Aart Bikc39dac12016-01-21 08:59:48 -08002457 locations->SetOut(Location::RequiresRegister());
2458}
2459
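// For reference, a C++ sketch of the code GenBitCount emits (illustrative
// only; the 64-bit input arrives as a register pair or a double stack slot):
//
//   int bitCount32(uint32_t x) { return __builtin_popcount(x); }  // popcntl
//   int bitCount64(uint32_t lo, uint32_t hi) {
//     return __builtin_popcount(lo) + __builtin_popcount(hi);     // two popcntl + addl
//   }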
Aart Bika19616e2016-02-01 18:57:58 -08002460static void GenBitCount(X86Assembler* assembler,
2461 CodeGeneratorX86* codegen,
2462 HInvoke* invoke, bool is_long) {
Aart Bikc39dac12016-01-21 08:59:48 -08002463 LocationSummary* locations = invoke->GetLocations();
2464 Location src = locations->InAt(0);
2465 Register out = locations->Out().AsRegister<Register>();
2466
2467 if (invoke->InputAt(0)->IsConstant()) {
2468 // Evaluate this at compile time.
2469 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
Roland Levillainfa3912e2016-04-01 18:21:55 +01002470 int32_t result = is_long
Aart Bikc39dac12016-01-21 08:59:48 -08002471 ? POPCOUNT(static_cast<uint64_t>(value))
2472 : POPCOUNT(static_cast<uint32_t>(value));
Roland Levillainfa3912e2016-04-01 18:21:55 +01002473 codegen->Load32BitValue(out, result);
Aart Bikc39dac12016-01-21 08:59:48 -08002474 return;
2475 }
2476
2477 // Handle the non-constant cases.
2478 if (!is_long) {
2479 if (src.IsRegister()) {
2480 __ popcntl(out, src.AsRegister<Register>());
2481 } else {
2482 DCHECK(src.IsStackSlot());
2483 __ popcntl(out, Address(ESP, src.GetStackIndex()));
2484 }
Aart Bik2a946072016-01-21 12:49:00 -08002485 } else {
2486 // The 64-bit case needs to worry about two parts.
2487 Register temp = locations->GetTemp(0).AsRegister<Register>();
2488 if (src.IsRegisterPair()) {
2489 __ popcntl(temp, src.AsRegisterPairLow<Register>());
2490 __ popcntl(out, src.AsRegisterPairHigh<Register>());
2491 } else {
2492 DCHECK(src.IsDoubleStackSlot());
2493 __ popcntl(temp, Address(ESP, src.GetStackIndex()));
2494 __ popcntl(out, Address(ESP, src.GetHighStackIndex(kX86WordSize)));
2495 }
2496 __ addl(out, temp);
Aart Bikc39dac12016-01-21 08:59:48 -08002497 }
Aart Bikc39dac12016-01-21 08:59:48 -08002498}
2499
2500void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) {
2501 CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ false);
2502}
2503
2504void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002505 GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ false);
Aart Bikc39dac12016-01-21 08:59:48 -08002506}
2507
2508void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) {
2509 CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ true);
2510}
2511
2512void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002513 GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ true);
Aart Bikc39dac12016-01-21 08:59:48 -08002514}
2515
Mark Mendelld5897672015-08-12 21:16:41 -04002516static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
2517 LocationSummary* locations = new (arena) LocationSummary(invoke,
2518 LocationSummary::kNoCall,
2519 kIntrinsified);
2520 if (is_long) {
2521 locations->SetInAt(0, Location::RequiresRegister());
2522 } else {
2523 locations->SetInAt(0, Location::Any());
2524 }
2525 locations->SetOut(Location::RequiresRegister());
2526}
2527
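// For reference, a C++ sketch of the 64-bit path GenLeadingZeros emits
// (illustrative only; `lo`/`hi` are the halves of the register pair):
//
//   int numberOfLeadingZeros64(uint32_t lo, uint32_t hi) {
//     if (hi != 0) return __builtin_clz(hi);       // bsrl, then xorl with 31
//     if (lo != 0) return 32 + __builtin_clz(lo);  // bsrl, then xorl with 63
//     return 64;                                   // all-zero case
//   }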
Aart Bika19616e2016-02-01 18:57:58 -08002528static void GenLeadingZeros(X86Assembler* assembler,
2529 CodeGeneratorX86* codegen,
2530 HInvoke* invoke, bool is_long) {
Mark Mendelld5897672015-08-12 21:16:41 -04002531 LocationSummary* locations = invoke->GetLocations();
2532 Location src = locations->InAt(0);
2533 Register out = locations->Out().AsRegister<Register>();
2534
2535 if (invoke->InputAt(0)->IsConstant()) {
2536 // Evaluate this at compile time.
2537 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
2538 if (value == 0) {
2539 value = is_long ? 64 : 32;
2540 } else {
2541 value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
2542 }
Aart Bika19616e2016-02-01 18:57:58 -08002543 codegen->Load32BitValue(out, value);
Mark Mendelld5897672015-08-12 21:16:41 -04002544 return;
2545 }
2546
2547 // Handle the non-constant cases.
2548 if (!is_long) {
2549 if (src.IsRegister()) {
2550 __ bsrl(out, src.AsRegister<Register>());
2551 } else {
2552 DCHECK(src.IsStackSlot());
2553 __ bsrl(out, Address(ESP, src.GetStackIndex()));
2554 }
2555
    // BSR sets ZF if the input was zero; in that case the output is undefined.
Mark Mendell0c9497d2015-08-21 09:30:05 -04002557 NearLabel all_zeroes, done;
Mark Mendelld5897672015-08-12 21:16:41 -04002558 __ j(kEqual, &all_zeroes);
2559
2560 // Correct the result from BSR to get the final CLZ result.
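    // BSR leaves the index i of the highest set bit; for 0 <= i <= 31 we have
    // 31 - i == 31 ^ i, so one XOR converts the bit index to a zero count.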
2561 __ xorl(out, Immediate(31));
2562 __ jmp(&done);
2563
2564 // Fix the zero case with the expected result.
2565 __ Bind(&all_zeroes);
2566 __ movl(out, Immediate(32));
2567
2568 __ Bind(&done);
2569 return;
2570 }
2571
  // The 64-bit case needs to worry about both parts of the register pair.
2573 DCHECK(src.IsRegisterPair());
2574 Register src_lo = src.AsRegisterPairLow<Register>();
2575 Register src_hi = src.AsRegisterPairHigh<Register>();
Mark Mendell0c9497d2015-08-21 09:30:05 -04002576 NearLabel handle_low, done, all_zeroes;
Mark Mendelld5897672015-08-12 21:16:41 -04002577
2578 // Is the high word zero?
2579 __ testl(src_hi, src_hi);
2580 __ j(kEqual, &handle_low);
2581
2582 // High word is not zero. We know that the BSR result is defined in this case.
2583 __ bsrl(out, src_hi);
2584
2585 // Correct the result from BSR to get the final CLZ result.
2586 __ xorl(out, Immediate(31));
2587 __ jmp(&done);
2588
2589 // High word was zero. We have to compute the low word count and add 32.
2590 __ Bind(&handle_low);
2591 __ bsrl(out, src_lo);
2592 __ j(kEqual, &all_zeroes);
2593
2594 // We had a valid result. Use an XOR to both correct the result and add 32.
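  // Here CLZ == 32 + (31 - i) == 63 - i == 63 ^ i for 0 <= i <= 31.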
2595 __ xorl(out, Immediate(63));
2596 __ jmp(&done);
2597
2598 // All zero case.
2599 __ Bind(&all_zeroes);
2600 __ movl(out, Immediate(64));
2601
2602 __ Bind(&done);
2603}
2604
2605void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
2606 CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
2607}
2608
2609void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002610 GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
Mark Mendelld5897672015-08-12 21:16:41 -04002611}
2612
2613void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
2614 CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
2615}
2616
2617void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002618 GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
Mark Mendelld5897672015-08-12 21:16:41 -04002619}
2620
Mark Mendell2d554792015-09-15 21:45:18 -04002621static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
2622 LocationSummary* locations = new (arena) LocationSummary(invoke,
2623 LocationSummary::kNoCall,
2624 kIntrinsified);
2625 if (is_long) {
2626 locations->SetInAt(0, Location::RequiresRegister());
2627 } else {
2628 locations->SetInAt(0, Location::Any());
2629 }
2630 locations->SetOut(Location::RequiresRegister());
2631}
2632
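// For reference, a C++ sketch of the 64-bit path GenTrailingZeros emits
// (illustrative only; `lo`/`hi` are the halves of the register pair):
//
//   int numberOfTrailingZeros64(uint32_t lo, uint32_t hi) {
//     if (lo != 0) return __builtin_ctz(lo);       // bsfl finds it directly
//     if (hi != 0) return 32 + __builtin_ctz(hi);  // bsfl, then addl 32
//     return 64;                                   // all-zero case
//   }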
Aart Bika19616e2016-02-01 18:57:58 -08002633static void GenTrailingZeros(X86Assembler* assembler,
2634 CodeGeneratorX86* codegen,
2635 HInvoke* invoke, bool is_long) {
Mark Mendell2d554792015-09-15 21:45:18 -04002636 LocationSummary* locations = invoke->GetLocations();
2637 Location src = locations->InAt(0);
2638 Register out = locations->Out().AsRegister<Register>();
2639
2640 if (invoke->InputAt(0)->IsConstant()) {
2641 // Evaluate this at compile time.
2642 int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
2643 if (value == 0) {
2644 value = is_long ? 64 : 32;
2645 } else {
2646 value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
2647 }
Aart Bika19616e2016-02-01 18:57:58 -08002648 codegen->Load32BitValue(out, value);
Mark Mendell2d554792015-09-15 21:45:18 -04002649 return;
2650 }
2651
2652 // Handle the non-constant cases.
2653 if (!is_long) {
2654 if (src.IsRegister()) {
2655 __ bsfl(out, src.AsRegister<Register>());
2656 } else {
2657 DCHECK(src.IsStackSlot());
2658 __ bsfl(out, Address(ESP, src.GetStackIndex()));
2659 }
2660
    // BSF sets ZF if the input was zero; in that case the output is undefined.
2662 NearLabel done;
2663 __ j(kNotEqual, &done);
2664
2665 // Fix the zero case with the expected result.
2666 __ movl(out, Immediate(32));
2667
2668 __ Bind(&done);
2669 return;
2670 }
2671
  // The 64-bit case needs to worry about both parts of the register pair.
2673 DCHECK(src.IsRegisterPair());
2674 Register src_lo = src.AsRegisterPairLow<Register>();
2675 Register src_hi = src.AsRegisterPairHigh<Register>();
2676 NearLabel done, all_zeroes;
2677
2678 // If the low word is zero, then ZF will be set. If not, we have the answer.
2679 __ bsfl(out, src_lo);
2680 __ j(kNotEqual, &done);
2681
2682 // Low word was zero. We have to compute the high word count and add 32.
2683 __ bsfl(out, src_hi);
2684 __ j(kEqual, &all_zeroes);
2685
2686 // We had a valid result. Add 32 to account for the low word being zero.
2687 __ addl(out, Immediate(32));
2688 __ jmp(&done);
2689
2690 // All zero case.
2691 __ Bind(&all_zeroes);
2692 __ movl(out, Immediate(64));
2693
2694 __ Bind(&done);
2695}
2696
2697void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
2698 CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
2699}
2700
2701void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002702 GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
Mark Mendell2d554792015-09-15 21:45:18 -04002703}
2704
2705void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
2706 CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
2707}
2708
2709void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002710 GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
Mark Mendell2d554792015-09-15 21:45:18 -04002711}
2712
Serguei Katkov288c7a82016-05-16 11:53:15 +06002713void IntrinsicLocationsBuilderX86::VisitReferenceGetReferent(HInvoke* invoke) {
2714 if (kEmitCompilerReadBarrier) {
2715 // Do not intrinsify this call with the read barrier configuration.
2716 return;
2717 }
2718 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2719 LocationSummary::kCallOnSlowPath,
2720 kIntrinsified);
2721 locations->SetInAt(0, Location::RequiresRegister());
2722 locations->SetOut(Location::SameAsFirstInput());
2723 locations->AddTemp(Location::RequiresRegister());
2724}
2725
2726void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
2727 DCHECK(!kEmitCompilerReadBarrier);
2728 LocationSummary* locations = invoke->GetLocations();
2729 X86Assembler* assembler = GetAssembler();
2730
2731 Register obj = locations->InAt(0).AsRegister<Register>();
2732 Register out = locations->Out().AsRegister<Register>();
2733
2734 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
2735 codegen_->AddSlowPath(slow_path);
2736
2737 // Load ArtMethod first.
2738 HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
2739 DCHECK(invoke_direct != nullptr);
2740 Location temp_loc = codegen_->GenerateCalleeMethodStaticOrDirectCall(
2741 invoke_direct, locations->GetTemp(0));
2742 DCHECK(temp_loc.Equals(locations->GetTemp(0)));
2743 Register temp = temp_loc.AsRegister<Register>();
2744
2745 // Now get declaring class.
2746 __ movl(temp, Address(temp, ArtMethod::DeclaringClassOffset().Int32Value()));
2747
2748 uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
2749 uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
2750 DCHECK_NE(slow_path_flag_offset, 0u);
2751 DCHECK_NE(disable_flag_offset, 0u);
2752 DCHECK_NE(slow_path_flag_offset, disable_flag_offset);
2753
  // Check the static flags that prevent us from using the intrinsic.
2755 if (slow_path_flag_offset == disable_flag_offset + 1) {
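    // The two byte-sized flags are adjacent in memory, so a single 16-bit
    // compare against zero tests both at once.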
2756 __ cmpw(Address(temp, disable_flag_offset), Immediate(0));
2757 __ j(kNotEqual, slow_path->GetEntryLabel());
2758 } else {
2759 __ cmpb(Address(temp, disable_flag_offset), Immediate(0));
2760 __ j(kNotEqual, slow_path->GetEntryLabel());
2761 __ cmpb(Address(temp, slow_path_flag_offset), Immediate(0));
2762 __ j(kNotEqual, slow_path->GetEntryLabel());
2763 }
2764
2765 // Fast path.
2766 __ movl(out, Address(obj, mirror::Reference::ReferentOffset().Int32Value()));
2767 codegen_->MaybeRecordImplicitNullCheck(invoke);
2768 __ MaybeUnpoisonHeapReference(out);
2769 __ Bind(slow_path->GetExitLabel());
2770}
2771
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002772static bool IsSameInput(HInstruction* instruction, size_t input0, size_t input1) {
2773 return instruction->InputAt(input0) == instruction->InputAt(input1);
2774}
2775
2776void IntrinsicLocationsBuilderX86::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barrier.
2779 if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002780 return;
2781 }
2782
2783 CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
2784 if (invoke->GetLocations() != nullptr) {
2785 // Need a byte register for marking.
2786 invoke->GetLocations()->SetTempAt(1, Location::RegisterLocation(ECX));
2787
2788 static constexpr size_t kSrc = 0;
2789 static constexpr size_t kSrcPos = 1;
2790 static constexpr size_t kDest = 2;
2791 static constexpr size_t kDestPos = 3;
2792 static constexpr size_t kLength = 4;
2793
2794 if (!invoke->InputAt(kSrcPos)->IsIntConstant() &&
2795 !invoke->InputAt(kDestPos)->IsIntConstant() &&
2796 !invoke->InputAt(kLength)->IsIntConstant()) {
2797 if (!IsSameInput(invoke, kSrcPos, kDestPos) &&
2798 !IsSameInput(invoke, kSrcPos, kLength) &&
2799 !IsSameInput(invoke, kDestPos, kLength) &&
2800 !IsSameInput(invoke, kSrc, kDest)) {
2801 // Not enough registers, make the length also take a stack slot.
2802 invoke->GetLocations()->SetInAt(kLength, Location::Any());
2803 }
2804 }
2805 }
2806}
2807
2808void IntrinsicCodeGeneratorX86::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barrier.
2811 DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002812
2813 X86Assembler* assembler = GetAssembler();
2814 LocationSummary* locations = invoke->GetLocations();
2815
2816 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
2817 uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
2818 uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
2819 uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
Roland Levillain0b671c02016-08-19 12:02:34 +01002820 uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002821
2822 Register src = locations->InAt(0).AsRegister<Register>();
2823 Location src_pos = locations->InAt(1);
2824 Register dest = locations->InAt(2).AsRegister<Register>();
2825 Location dest_pos = locations->InAt(3);
Roland Levillain0b671c02016-08-19 12:02:34 +01002826 Location length_arg = locations->InAt(4);
2827 Location length = length_arg;
2828 Location temp1_loc = locations->GetTemp(0);
2829 Register temp1 = temp1_loc.AsRegister<Register>();
2830 Location temp2_loc = locations->GetTemp(1);
2831 Register temp2 = temp2_loc.AsRegister<Register>();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002832
Roland Levillain0b671c02016-08-19 12:02:34 +01002833 SlowPathCode* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
2834 codegen_->AddSlowPath(intrinsic_slow_path);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002835
2836 NearLabel conditions_on_positions_validated;
2837 SystemArrayCopyOptimizations optimizations(invoke);
2838
  // If source and destination are the same, we go to the slow path if
  // src_pos < dest_pos: the forward copy loop below would otherwise overwrite
  // source elements before they have been read.
2841 if (src_pos.IsConstant()) {
2842 int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
2843 if (dest_pos.IsConstant()) {
2844 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2845 if (optimizations.GetDestinationIsSource()) {
2846 // Checked when building locations.
2847 DCHECK_GE(src_pos_constant, dest_pos_constant);
2848 } else if (src_pos_constant < dest_pos_constant) {
2849 __ cmpl(src, dest);
Roland Levillain0b671c02016-08-19 12:02:34 +01002850 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002851 }
2852 } else {
2853 if (!optimizations.GetDestinationIsSource()) {
2854 __ cmpl(src, dest);
2855 __ j(kNotEqual, &conditions_on_positions_validated);
2856 }
2857 __ cmpl(dest_pos.AsRegister<Register>(), Immediate(src_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01002858 __ j(kGreater, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002859 }
2860 } else {
2861 if (!optimizations.GetDestinationIsSource()) {
2862 __ cmpl(src, dest);
2863 __ j(kNotEqual, &conditions_on_positions_validated);
2864 }
2865 if (dest_pos.IsConstant()) {
2866 int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
2867 __ cmpl(src_pos.AsRegister<Register>(), Immediate(dest_pos_constant));
Roland Levillain0b671c02016-08-19 12:02:34 +01002868 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002869 } else {
2870 __ cmpl(src_pos.AsRegister<Register>(), dest_pos.AsRegister<Register>());
Roland Levillain0b671c02016-08-19 12:02:34 +01002871 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002872 }
2873 }
2874
2875 __ Bind(&conditions_on_positions_validated);
2876
2877 if (!optimizations.GetSourceIsNotNull()) {
2878 // Bail out if the source is null.
2879 __ testl(src, src);
Roland Levillain0b671c02016-08-19 12:02:34 +01002880 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002881 }
2882
2883 if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
2884 // Bail out if the destination is null.
2885 __ testl(dest, dest);
Roland Levillain0b671c02016-08-19 12:02:34 +01002886 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002887 }
2888
Roland Levillain0b671c02016-08-19 12:02:34 +01002889 Location temp3_loc = locations->GetTemp(2);
2890 Register temp3 = temp3_loc.AsRegister<Register>();
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002891 if (length.IsStackSlot()) {
2892 __ movl(temp3, Address(ESP, length.GetStackIndex()));
2893 length = Location::RegisterLocation(temp3);
2894 }
2895
2896 // If the length is negative, bail out.
2897 // We have already checked in the LocationsBuilder for the constant case.
2898 if (!length.IsConstant() &&
2899 !optimizations.GetCountIsSourceLength() &&
2900 !optimizations.GetCountIsDestinationLength()) {
2901 __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
Roland Levillain0b671c02016-08-19 12:02:34 +01002902 __ j(kLess, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002903 }
2904
2905 // Validity checks: source.
2906 CheckPosition(assembler,
2907 src_pos,
2908 src,
2909 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01002910 intrinsic_slow_path,
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002911 temp1,
2912 optimizations.GetCountIsSourceLength());
2913
2914 // Validity checks: dest.
2915 CheckPosition(assembler,
2916 dest_pos,
2917 dest,
2918 length,
Roland Levillain0b671c02016-08-19 12:02:34 +01002919 intrinsic_slow_path,
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002920 temp1,
2921 optimizations.GetCountIsDestinationLength());
2922
2923 if (!optimizations.GetDoesNotNeedTypeCheck()) {
2924 // Check whether all elements of the source array are assignable to the component
2925 // type of the destination array. We do two checks: the classes are the same,
2926 // or the destination is Object[]. If none of these checks succeed, we go to the
2927 // slow path.
Roland Levillain0b671c02016-08-19 12:02:34 +01002928
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002929 if (!optimizations.GetSourceIsNonPrimitiveArray()) {
Roland Levillain0b671c02016-08-19 12:02:34 +01002930 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2931 // /* HeapReference<Class> */ temp1 = src->klass_
2932 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002933 invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
        // Bail out if the source is not a non-primitive array.
2935 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2936 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002937 invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002938 __ testl(temp1, temp1);
2939 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp1` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2942 } else {
2943 // /* HeapReference<Class> */ temp1 = src->klass_
2944 __ movl(temp1, Address(src, class_offset));
2945 __ MaybeUnpoisonHeapReference(temp1);
        // Bail out if the source is not a non-primitive array.
2947 // /* HeapReference<Class> */ temp1 = temp1->component_type_
2948 __ movl(temp1, Address(temp1, component_offset));
2949 __ testl(temp1, temp1);
2950 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
2951 __ MaybeUnpoisonHeapReference(temp1);
2952 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002953 __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0b671c02016-08-19 12:02:34 +01002954 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002955 }
2956
Roland Levillain0b671c02016-08-19 12:02:34 +01002957 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
2958 if (length.Equals(Location::RegisterLocation(temp3))) {
2959 // When Baker read barriers are enabled, register `temp3`,
2960 // which in the present case contains the `length` parameter,
2961 // will be overwritten below. Make the `length` location
2962 // reference the original stack location; it will be moved
2963 // back to `temp3` later if necessary.
2964 DCHECK(length_arg.IsStackSlot());
2965 length = length_arg;
2966 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002967
Roland Levillain0b671c02016-08-19 12:02:34 +01002968 // /* HeapReference<Class> */ temp1 = dest->klass_
2969 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002970 invoke, temp1_loc, dest, class_offset, /* needs_null_check */ false);
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01002971
Roland Levillain0b671c02016-08-19 12:02:34 +01002972 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non-primitive array.
        //
        // Register `temp1` is not trashed by the read barrier emitted
        // by GenerateFieldLoadWithBakerReadBarrier below, as that
        // method produces a call to a ReadBarrierMarkRegX entry point,
        // which saves all potentially live registers, including
        // temporaries such as `temp1`.
2980 // /* HeapReference<Class> */ temp2 = temp1->component_type_
2981 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002982 invoke, temp2_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002983 __ testl(temp2, temp2);
2984 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp2` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
2987 __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
2988 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
2989 }
2990
2991 // For the same reason given earlier, `temp1` is not trashed by the
2992 // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
2993 // /* HeapReference<Class> */ temp2 = src->klass_
2994 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00002995 invoke, temp2_loc, src, class_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01002996 // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
2997 __ cmpl(temp1, temp2);
2998
2999 if (optimizations.GetDestinationIsTypedObjectArray()) {
3000 NearLabel do_copy;
3001 __ j(kEqual, &do_copy);
3002 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3003 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00003004 invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01003005 // We do not need to emit a read barrier for the following
3006 // heap reference load, as `temp1` is only used in a
3007 // comparison with null below, and this reference is not
3008 // kept afterwards.
3009 __ cmpl(Address(temp1, super_offset), Immediate(0));
3010 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3011 __ Bind(&do_copy);
3012 } else {
3013 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3014 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003015 } else {
Roland Levillain0b671c02016-08-19 12:02:34 +01003016 // Non read barrier code.
3017
3018 // /* HeapReference<Class> */ temp1 = dest->klass_
3019 __ movl(temp1, Address(dest, class_offset));
3020 if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
3021 __ MaybeUnpoisonHeapReference(temp1);
        // Bail out if the destination is not a non-primitive array.
3023 // /* HeapReference<Class> */ temp2 = temp1->component_type_
3024 __ movl(temp2, Address(temp1, component_offset));
3025 __ testl(temp2, temp2);
3026 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
3027 __ MaybeUnpoisonHeapReference(temp2);
3028 __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
3029 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3030 // Re-poison the heap reference to make the compare instruction below
3031 // compare two poisoned references.
3032 __ PoisonHeapReference(temp1);
3033 }
3034
3035 // Note: if heap poisoning is on, we are comparing two poisoned references here.
3036 __ cmpl(temp1, Address(src, class_offset));
3037
3038 if (optimizations.GetDestinationIsTypedObjectArray()) {
3039 NearLabel do_copy;
3040 __ j(kEqual, &do_copy);
3041 __ MaybeUnpoisonHeapReference(temp1);
3042 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3043 __ movl(temp1, Address(temp1, component_offset));
3044 __ MaybeUnpoisonHeapReference(temp1);
3045 __ cmpl(Address(temp1, super_offset), Immediate(0));
3046 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3047 __ Bind(&do_copy);
3048 } else {
3049 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
3050 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003051 }
3052 } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
3053 DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non-primitive array.
Roland Levillain0b671c02016-08-19 12:02:34 +01003055 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3056 // /* HeapReference<Class> */ temp1 = src->klass_
3057 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00003058 invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01003059 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3060 codegen_->GenerateFieldLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00003061 invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
Roland Levillain0b671c02016-08-19 12:02:34 +01003062 __ testl(temp1, temp1);
3063 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      // If heap poisoning is enabled, `temp1` has been unpoisoned
      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
3066 } else {
3067 // /* HeapReference<Class> */ temp1 = src->klass_
3068 __ movl(temp1, Address(src, class_offset));
3069 __ MaybeUnpoisonHeapReference(temp1);
3070 // /* HeapReference<Class> */ temp1 = temp1->component_type_
3071 __ movl(temp1, Address(temp1, component_offset));
3072 __ testl(temp1, temp1);
3073 __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
3074 __ MaybeUnpoisonHeapReference(temp1);
3075 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003076 __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
Roland Levillain0b671c02016-08-19 12:02:34 +01003077 __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003078 }
3079
Roland Levillain0b671c02016-08-19 12:02:34 +01003080 // Compute the base source address in `temp1`.
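  // That is: temp1 = src + data_offset + src_pos * element_size.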
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003081 int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
3082 DCHECK_EQ(element_size, 4);
3083 uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
3084 if (src_pos.IsConstant()) {
3085 int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
3086 __ leal(temp1, Address(src, element_size * constant + offset));
3087 } else {
3088 __ leal(temp1, Address(src, src_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
3089 }
3090
Roland Levillain0b671c02016-08-19 12:02:34 +01003091 if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
3092 // If it is needed (in the case of the fast-path loop), the base
3093 // destination address is computed later, as `temp2` is used for
3094 // intermediate computations.
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003095
Roland Levillain0b671c02016-08-19 12:02:34 +01003096 // Compute the end source address in `temp3`.
3097 if (length.IsConstant()) {
3098 int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
3099 __ leal(temp3, Address(temp1, element_size * constant));
3100 } else {
3101 if (length.IsStackSlot()) {
        // Location `length` is again pointing at a stack slot, as
        // register `temp3` (which contained the length parameter
        // earlier) has been overwritten; restore it now.
3105 DCHECK(length.Equals(length_arg));
3106 __ movl(temp3, Address(ESP, length.GetStackIndex()));
3107 length = Location::RegisterLocation(temp3);
3108 }
3109 __ leal(temp3, Address(temp1, length.AsRegister<Register>(), ScaleFactor::TIMES_4, 0));
3110 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003111
    // SystemArrayCopy implementation for Baker read barriers (see
    // also CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier):
    //
    //   if (src_ptr != end_ptr) {
    //     uint32_t rb_state = LockWord(src->monitor_).ReadBarrierState();
    //     lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //     bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
    //     if (is_gray) {
    //       // Slow-path copy.
    //       for (size_t i = 0; i != length; ++i) {
    //         dest_array[dest_pos + i] =
    //             MaybePoison(ReadBarrier::Mark(MaybeUnpoison(src_array[src_pos + i])));
    //       }
    //     } else {
    //       // Fast-path copy.
    //       do {
    //         *dest_ptr++ = *src_ptr++;
    //       } while (src_ptr != end_ptr);
    //     }
    //   }
3132
3133 NearLabel loop, done;
3134
3135 // Don't enter copy loop if `length == 0`.
3136 __ cmpl(temp1, temp3);
3137 __ j(kEqual, &done);
3138
Vladimir Marko953437b2016-08-24 08:30:46 +00003139 // Given the numeric representation, it's enough to check the low bit of the rb_state.
3140 static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
3141 static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
3142 static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
3143 constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
3144 constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
3145 constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);
3146
3147 // if (rb_state == ReadBarrier::gray_ptr_)
3148 // goto slow_path;
3149 // At this point, just do the "if" and make sure that flags are preserved until the branch.
3150 __ testb(Address(src, monitor_offset + gray_byte_position), Immediate(test_value));
Roland Levillain0b671c02016-08-19 12:02:34 +01003151
3152 // Load fence to prevent load-load reordering.
3153 // Note that this is a no-op, thanks to the x86 memory model.
3154 codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
3155
3156 // Slow path used to copy array when `src` is gray.
3157 SlowPathCode* read_barrier_slow_path =
3158 new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathX86(invoke);
3159 codegen_->AddSlowPath(read_barrier_slow_path);
3160
Vladimir Marko953437b2016-08-24 08:30:46 +00003161 // We have done the "if" of the gray bit check above, now branch based on the flags.
3162 __ j(kNotZero, read_barrier_slow_path->GetEntryLabel());
Roland Levillain0b671c02016-08-19 12:02:34 +01003163
3164 // Fast-path copy.
3165
3166 // Set the base destination address in `temp2`.
3167 if (dest_pos.IsConstant()) {
3168 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
3169 __ leal(temp2, Address(dest, element_size * constant + offset));
3170 } else {
3171 __ leal(temp2, Address(dest, dest_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
3172 }
3173
3174 // Iterate over the arrays and do a raw copy of the objects. We don't need to
3175 // poison/unpoison.
3176 __ Bind(&loop);
3177 __ pushl(Address(temp1, 0));
3178 __ cfi().AdjustCFAOffset(4);
3179 __ popl(Address(temp2, 0));
3180 __ cfi().AdjustCFAOffset(-4);
3181 __ addl(temp1, Immediate(element_size));
3182 __ addl(temp2, Immediate(element_size));
3183 __ cmpl(temp1, temp3);
3184 __ j(kNotEqual, &loop);
3185
3186 __ Bind(read_barrier_slow_path->GetExitLabel());
3187 __ Bind(&done);
3188 } else {
3189 // Non read barrier code.
3190
3191 // Compute the base destination address in `temp2`.
3192 if (dest_pos.IsConstant()) {
3193 int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
3194 __ leal(temp2, Address(dest, element_size * constant + offset));
3195 } else {
3196 __ leal(temp2, Address(dest, dest_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
3197 }
3198
3199 // Compute the end source address in `temp3`.
3200 if (length.IsConstant()) {
3201 int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
3202 __ leal(temp3, Address(temp1, element_size * constant));
3203 } else {
3204 __ leal(temp3, Address(temp1, length.AsRegister<Register>(), ScaleFactor::TIMES_4, 0));
3205 }
3206
3207 // Iterate over the arrays and do a raw copy of the objects. We don't need to
3208 // poison/unpoison.
3209 NearLabel loop, done;
3210 __ cmpl(temp1, temp3);
3211 __ j(kEqual, &done);
3212 __ Bind(&loop);
3213 __ pushl(Address(temp1, 0));
3214 __ cfi().AdjustCFAOffset(4);
3215 __ popl(Address(temp2, 0));
3216 __ cfi().AdjustCFAOffset(-4);
3217 __ addl(temp1, Immediate(element_size));
3218 __ addl(temp2, Immediate(element_size));
3219 __ cmpl(temp1, temp3);
3220 __ j(kNotEqual, &loop);
3221 __ Bind(&done);
3222 }
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003223
3224 // We only need one card marking on the destination array.
3225 codegen_->MarkGCCard(temp1,
3226 temp2,
3227 dest,
3228 Register(kNoRegister),
3229 /* value_can_be_null */ false);
3230
Roland Levillain0b671c02016-08-19 12:02:34 +01003231 __ Bind(intrinsic_slow_path->GetExitLabel());
Nicolas Geoffrayfea1abd2016-07-06 12:09:12 +01003232}
3233
Aart Bik2f9fcc92016-03-01 15:16:54 -08003234UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
Aart Bik2f9fcc92016-03-01 15:16:54 -08003235UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
3236UNIMPLEMENTED_INTRINSIC(X86, DoubleIsInfinite)
3237UNIMPLEMENTED_INTRINSIC(X86, IntegerHighestOneBit)
3238UNIMPLEMENTED_INTRINSIC(X86, LongHighestOneBit)
3239UNIMPLEMENTED_INTRINSIC(X86, IntegerLowestOneBit)
3240UNIMPLEMENTED_INTRINSIC(X86, LongLowestOneBit)
Mark Mendell09ed1a32015-03-25 08:30:06 -04003241
Aart Bik0e54c012016-03-04 12:08:31 -08003242// 1.8.
3243UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt)
3244UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddLong)
3245UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
3246UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
3247UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)
Aart Bik0e54c012016-03-04 12:08:31 -08003248
Aart Bik2f9fcc92016-03-01 15:16:54 -08003249UNREACHABLE_INTRINSICS(X86)
Roland Levillain4d027112015-07-01 15:41:14 +01003250
3251#undef __
3252
Mark Mendell09ed1a32015-03-25 08:30:06 -04003253} // namespace x86
3254} // namespace art