/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "intrinsics_x86.h"

#include <limits>

#include "arch/x86/instruction_set_features_x86.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "code_generator_x86.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "intrinsics.h"
#include "intrinsics_utils.h"
#include "mirror/array-inl.h"
#include "mirror/string.h"
#include "thread.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/constants_x86.h"

namespace art {

namespace x86 {

static constexpr int kDoubleNaNHigh = 0x7FF80000;
static constexpr int kDoubleNaNLow = 0x00000000;
static constexpr int64_t kDoubleNaN = INT64_C(0x7FF8000000000000);
static constexpr int32_t kFloatNaN = INT32_C(0x7FC00000);

IntrinsicLocationsBuilderX86::IntrinsicLocationsBuilderX86(CodeGeneratorX86* codegen)
    : arena_(codegen->GetGraph()->GetArena()),
      codegen_(codegen) {
}


X86Assembler* IntrinsicCodeGeneratorX86::GetAssembler() {
  return down_cast<X86Assembler*>(codegen_->GetAssembler());
}

ArenaAllocator* IntrinsicCodeGeneratorX86::GetAllocator() {
  return codegen_->GetGraph()->GetArena();
}

bool IntrinsicLocationsBuilderX86::TryDispatch(HInvoke* invoke) {
  Dispatch(invoke);
  LocationSummary* res = invoke->GetLocations();
  if (res == nullptr) {
    return false;
  }
  return res->Intrinsified();
}

static void MoveArguments(HInvoke* invoke, CodeGeneratorX86* codegen) {
  InvokeDexCallingConventionVisitorX86 calling_convention_visitor;
  IntrinsicVisitor::MoveArguments(invoke, codegen, &calling_convention_visitor);
}

using IntrinsicSlowPathX86 = IntrinsicSlowPath<InvokeDexCallingConventionVisitorX86>;

// NOLINT on __ macro to suppress wrong warning/fix (misc-macro-parentheses) from clang-tidy.
#define __ down_cast<X86Assembler*>(codegen->GetAssembler())->  // NOLINT

// Slow path implementing the SystemArrayCopy intrinsic copy loop with read barriers.
class ReadBarrierSystemArrayCopySlowPathX86 : public SlowPathCode {
 public:
  explicit ReadBarrierSystemArrayCopySlowPathX86(HInstruction* instruction)
      : SlowPathCode(instruction) {
    DCHECK(kEmitCompilerReadBarrier);
    DCHECK(kUseBakerReadBarrier);
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(locations->CanCall());
    DCHECK(instruction_->IsInvokeStaticOrDirect())
        << "Unexpected instruction in read barrier arraycopy slow path: "
        << instruction_->DebugName();
    DCHECK(instruction_->GetLocations()->Intrinsified());
    DCHECK_EQ(instruction_->AsInvoke()->GetIntrinsic(), Intrinsics::kSystemArrayCopy);

    int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
    uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();

    Register src = locations->InAt(0).AsRegister<Register>();
    Location src_pos = locations->InAt(1);
    Register dest = locations->InAt(2).AsRegister<Register>();
    Location dest_pos = locations->InAt(3);
    Location length = locations->InAt(4);
    Location temp1_loc = locations->GetTemp(0);
    Register temp1 = temp1_loc.AsRegister<Register>();
    Register temp2 = locations->GetTemp(1).AsRegister<Register>();
    Register temp3 = locations->GetTemp(2).AsRegister<Register>();

    __ Bind(GetEntryLabel());
    // In this code path, registers `temp1`, `temp2`, and `temp3` are not
    // used for the base source address, the base destination address, and
    // the end source address (respectively), as they are in other
    // SystemArrayCopy intrinsic code paths. Instead they respectively hold:
    // - the loop index (`i`);
    // - the source index (`src_index`) and the loaded (source)
    //   reference (`value`); and
    // - the destination index (`dest_index`).

    // i = 0
    __ xorl(temp1, temp1);
    NearLabel loop;
    __ Bind(&loop);
    // value = src_array[i + src_pos]
    if (src_pos.IsConstant()) {
      int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset));
    } else {
      __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset));
    }
    __ MaybeUnpoisonHeapReference(temp2);
    // TODO: Inline the mark bit check before calling the runtime?
    // value = ReadBarrier::Mark(value)
    // No need to save live registers; it's taken care of by the
    // entrypoint. Also, there is no need to update the stack mask,
    // as this runtime call will not trigger a garbage collection.
    // (See ReadBarrierMarkSlowPathX86::EmitNativeCode for more
    // explanations.)
    DCHECK_NE(temp2, ESP);
    DCHECK(0 <= temp2 && temp2 < kNumberOfCpuRegisters) << temp2;
    int32_t entry_point_offset =
        CodeGenerator::GetReadBarrierMarkEntryPointsOffset<kX86PointerSize>(temp2);
    // This runtime call does not require a stack map.
    x86_codegen->InvokeRuntimeWithoutRecordingPcInfo(entry_point_offset, instruction_, this);
    __ MaybePoisonHeapReference(temp2);
    // dest_array[i + dest_pos] = value
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      int32_t adjusted_offset = offset + constant * element_size;
      __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2);
    } else {
      __ leal(temp3, Address(dest_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0));
      __ movl(Address(dest, temp3, ScaleFactor::TIMES_4, offset), temp2);
    }
    // ++i
    __ addl(temp1, Immediate(1));
    // if (i != length) goto loop
    x86_codegen->GenerateIntCompare(temp1_loc, length);
    __ j(kNotEqual, &loop);
    __ jmp(GetExitLabel());
  }

  const char* GetDescription() const OVERRIDE { return "ReadBarrierSystemArrayCopySlowPathX86"; }

 private:
  DISALLOW_COPY_AND_ASSIGN(ReadBarrierSystemArrayCopySlowPathX86);
};

#undef __

#define __ assembler->

static void CreateFPToIntLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void CreateIntToFPLocations(ArenaAllocator* arena, HInvoke* invoke, bool is64bit) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresFpuRegister());
  if (is64bit) {
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MoveFPToInt(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
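    // On x86-32 a Java long lives in a register pair, so the double is copied
    // into a scratch XMM register and each 32-bit half is extracted with movd,
    // using a 64-bit logical right shift (psrlq) to bring the high half down.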
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, input.AsFpuRegister<XmmRegister>());
    __ movd(output.AsRegisterPairLow<Register>(), temp);
    __ psrlq(temp, Immediate(32));
    __ movd(output.AsRegisterPairHigh<Register>(), temp);
  } else {
    __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());
  }
}

static void MoveIntToFP(LocationSummary* locations, bool is64bit, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Location output = locations->Out();
  if (is64bit) {
    // Need to use the temporary.
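    // The reverse direction: load each half of the register pair into its own
    // XMM register and interleave the two 32-bit words with punpckldq to
    // reassemble the 64-bit pattern before storing it to the output.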
    XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
    __ movd(temp1, input.AsRegisterPairLow<Register>());
    __ movd(temp2, input.AsRegisterPairHigh<Register>());
    __ punpckldq(temp1, temp2);
    __ movsd(output.AsFpuRegister<XmmRegister>(), temp1);
  } else {
    __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());
  }
}

void IntrinsicLocationsBuilderX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ true);
}
void IntrinsicLocationsBuilderX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ true);
}

void IntrinsicCodeGeneratorX86::VisitDoubleDoubleToRawLongBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitDoubleLongBitsToDouble(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  CreateFPToIntLocations(arena_, invoke, /* is64bit */ false);
}
void IntrinsicLocationsBuilderX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  CreateIntToFPLocations(arena_, invoke, /* is64bit */ false);
}

void IntrinsicCodeGeneratorX86::VisitFloatFloatToRawIntBits(HInvoke* invoke) {
  MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}
void IntrinsicCodeGeneratorX86::VisitFloatIntBitsToFloat(HInvoke* invoke) {
  MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler());
}

static void CreateIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

static void CreateLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
}

static void GenReverseBytes(LocationSummary* locations,
                            Primitive::Type size,
                            X86Assembler* assembler) {
  Register out = locations->Out().AsRegister<Register>();

  switch (size) {
    case Primitive::kPrimShort:
      // TODO: Can be done with an xchg of 8b registers. This is straight from Quick.
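      // bswapl reverses all four bytes, leaving the swapped short in the upper
      // half of the register; the arithmetic shift moves it back down while
      // sign-extending the 16-bit result.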
      __ bswapl(out);
      __ sarl(out, Immediate(16));
      break;
    case Primitive::kPrimInt:
      __ bswapl(out);
      break;
    default:
      LOG(FATAL) << "Unexpected size for reverse-bytes: " << size;
      UNREACHABLE();
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitIntegerReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitLongReverseBytes(HInvoke* invoke) {
  CreateLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitLongReverseBytes(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();

  X86Assembler* assembler = GetAssembler();
  // Assign the inputs to the outputs, mixing low/high.
  __ movl(output_lo, input_hi);
  __ movl(output_hi, input_lo);
  __ bswapl(output_lo);
  __ bswapl(output_hi);
}

void IntrinsicLocationsBuilderX86::VisitShortReverseBytes(HInvoke* invoke) {
  CreateIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitShortReverseBytes(HInvoke* invoke) {
  GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
}


// TODO: Consider Quick's way of doing Double abs through integer operations, as the immediate we
// need is 64b.

static void CreateFloatToFloat(ArenaAllocator* arena, HInvoke* invoke) {
  // TODO: Enable memory operations when the assembler supports them.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::SameAsFirstInput());
  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    // We need addressability for the constant area.
    locations->SetInAt(1, Location::RequiresRegister());
    // We need a temporary to hold the constant.
    locations->AddTemp(Location::RequiresFpuRegister());
  }
}

static void MathAbsFP(LocationSummary* locations,
                      bool is64bit,
                      X86Assembler* assembler,
                      CodeGeneratorX86* codegen) {
  Location output = locations->Out();

  DCHECK(output.IsFpuRegister());
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    DCHECK(locations->InAt(1).IsRegister());
    // We also have a constant area pointer.
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    if (is64bit) {
      __ movsd(temp, codegen->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF), constant_area));
      __ andpd(output.AsFpuRegister<XmmRegister>(), temp);
    } else {
      __ movss(temp, codegen->LiteralInt32Address(INT32_C(0x7FFFFFFF), constant_area));
      __ andps(output.AsFpuRegister<XmmRegister>(), temp);
    }
  } else {
    // Create the right constant on an aligned stack.
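    // abs() clears the IEEE-754 sign bit, so the mask is 0x7FFFFFFFFFFFFFFF for
    // doubles and 0x7FFFFFFF for floats. Both branches below adjust ESP by a
    // total of 16 bytes, which the single addl at the end undoes.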
    if (is64bit) {
      __ subl(ESP, Immediate(8));
      __ pushl(Immediate(0x7FFFFFFF));
      __ pushl(Immediate(0xFFFFFFFF));
      __ andpd(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    } else {
      __ subl(ESP, Immediate(12));
      __ pushl(Immediate(0x7FFFFFFF));
      __ andps(output.AsFpuRegister<XmmRegister>(), Address(ESP, 0));
    }
    __ addl(ESP, Immediate(16));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathAbsDouble(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsDouble(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetAssembler(), codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsFloat(HInvoke* invoke) {
  CreateFloatToFloat(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsFloat(HInvoke* invoke) {
  MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetAssembler(), codegen_);
}

static void CreateAbsIntLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RegisterLocation(EAX));
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RegisterLocation(EDX));
}

static void GenAbsInteger(LocationSummary* locations, X86Assembler* assembler) {
  Location output = locations->Out();
  Register out = output.AsRegister<Register>();
  DCHECK_EQ(out, EAX);
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(temp, EDX);

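  // Branch-free abs: with sign = x >> 31 (all ones for negative x, zero
  // otherwise), (x ^ sign) - sign negates x exactly when it is negative.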
  // Sign extend EAX into EDX.
  __ cdq();

  // XOR EAX with sign.
  __ xorl(EAX, EDX);

  // Subtract out sign to correct.
  __ subl(EAX, EDX);

  // The result is in EAX.
}

static void CreateAbsLongLocation(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
  locations->AddTemp(Location::RequiresRegister());
}

static void GenAbsLong(LocationSummary* locations, X86Assembler* assembler) {
  Location input = locations->InAt(0);
  Register input_lo = input.AsRegisterPairLow<Register>();
  Register input_hi = input.AsRegisterPairHigh<Register>();
  Location output = locations->Out();
  Register output_lo = output.AsRegisterPairLow<Register>();
  Register output_hi = output.AsRegisterPairHigh<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();

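  // Same (x ^ sign) - sign trick as the 32-bit case, applied to a register
  // pair: the sign word is broadcast to both halves and the final subtract
  // propagates the borrow from the low word with sbbl.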
  // Compute the sign into the temporary.
  __ movl(temp, input_hi);
  __ sarl(temp, Immediate(31));

  // Store the sign into the output.
  __ movl(output_lo, temp);
  __ movl(output_hi, temp);

  // XOR the input to the output.
  __ xorl(output_lo, input_lo);
  __ xorl(output_hi, input_hi);

  // Subtract the sign.
  __ subl(output_lo, temp);
  __ sbbl(output_hi, temp);
}

void IntrinsicLocationsBuilderX86::VisitMathAbsInt(HInvoke* invoke) {
  CreateAbsIntLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsInt(HInvoke* invoke) {
  GenAbsInteger(invoke->GetLocations(), GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathAbsLong(HInvoke* invoke) {
  CreateAbsLongLocation(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAbsLong(HInvoke* invoke) {
  GenAbsLong(invoke->GetLocations(), GetAssembler());
}

static void GenMinMaxFP(LocationSummary* locations,
                        bool is_min,
                        bool is_double,
                        X86Assembler* assembler,
                        CodeGeneratorX86* codegen) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);
  Location out_loc = locations->Out();
  XmmRegister out = out_loc.AsFpuRegister<XmmRegister>();

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    DCHECK(out_loc.Equals(op1_loc));
    return;
  }

  // (out := op1)
  // out <=? op2
  // if NaN jmp NaN_label
  // if out is min jmp done
  // if op2 is min jmp op2_label
  // handle -0/+0
  // jmp done
  // NaN_label:
  // out := NaN
  // op2_label:
  // out := op2
  // done:
  //
  // This removes one jmp, but needs to copy one input (op1) to out.
  //
  // TODO: This is straight from Quick (except literal pool). Make NaN an out-of-line slowpath?

  XmmRegister op2 = op2_loc.AsFpuRegister<XmmRegister>();

  NearLabel nan, done, op2_label;
  if (is_double) {
    __ ucomisd(out, op2);
  } else {
    __ ucomiss(out, op2);
  }

  __ j(Condition::kParityEven, &nan);

  __ j(is_min ? Condition::kAbove : Condition::kBelow, &op2_label);
  __ j(is_min ? Condition::kBelow : Condition::kAbove, &done);

  // Handle 0.0/-0.0.
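  // When the operands compare equal they can still differ in sign (+0.0 vs
  // -0.0). ORing the bit patterns makes -0.0 win for min, ANDing them makes
  // +0.0 win for max.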
  if (is_min) {
    if (is_double) {
      __ orpd(out, op2);
    } else {
      __ orps(out, op2);
    }
  } else {
    if (is_double) {
      __ andpd(out, op2);
    } else {
      __ andps(out, op2);
    }
  }
  __ jmp(&done);

  // NaN handling.
  __ Bind(&nan);
  // Do we have a constant area pointer?
  if (locations->GetInputCount() == 3 && locations->InAt(2).IsValid()) {
    DCHECK(locations->InAt(2).IsRegister());
    Register constant_area = locations->InAt(2).AsRegister<Register>();
    if (is_double) {
      __ movsd(out, codegen->LiteralInt64Address(kDoubleNaN, constant_area));
    } else {
      __ movss(out, codegen->LiteralInt32Address(kFloatNaN, constant_area));
    }
  } else {
    if (is_double) {
      __ pushl(Immediate(kDoubleNaNHigh));
      __ pushl(Immediate(kDoubleNaNLow));
      __ movsd(out, Address(ESP, 0));
      __ addl(ESP, Immediate(8));
    } else {
      __ pushl(Immediate(kFloatNaN));
      __ movss(out, Address(ESP, 0));
      __ addl(ESP, Immediate(4));
    }
  }
  __ jmp(&done);

  // out := op2;
  __ Bind(&op2_label);
  if (is_double) {
    __ movsd(out, op2);
  } else {
    __ movss(out, op2);
  }

  // Done.
  __ Bind(&done);
}

static void CreateFPFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetInAt(1, Location::RequiresFpuRegister());
  // The following is sub-optimal, but all we can do for now. It would be fine to also accept
  // the second input to be the output (we can simply swap inputs).
  locations->SetOut(Location::SameAsFirstInput());
  HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(static_or_direct != nullptr);
  if (static_or_direct->HasSpecialInput() &&
      invoke->InputAt(static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

void IntrinsicLocationsBuilderX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              /* is_double */ true,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ true,
              /* is_double */ false,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxDoubleDouble(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              /* is_double */ true,
              GetAssembler(),
              codegen_);
}

void IntrinsicLocationsBuilderX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  CreateFPFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxFloatFloat(HInvoke* invoke) {
  GenMinMaxFP(invoke->GetLocations(),
              /* is_min */ false,
              /* is_double */ false,
              GetAssembler(),
              codegen_);
}

static void GenMinMax(LocationSummary* locations, bool is_min, bool is_long,
                      X86Assembler* assembler) {
  Location op1_loc = locations->InAt(0);
  Location op2_loc = locations->InAt(1);

  // Shortcut for same input locations.
  if (op1_loc.Equals(op2_loc)) {
    // Can return immediately, as op1_loc == out_loc.
    // Note: if we ever support separate registers, e.g., output into memory, we need to check for
    // a copy here.
    DCHECK(locations->Out().Equals(op1_loc));
    return;
  }

  if (is_long) {
    // Need to perform a subtract to get the sign right.
    // op1 is already in the same location as the output.
    Location output = locations->Out();
    Register output_lo = output.AsRegisterPairLow<Register>();
    Register output_hi = output.AsRegisterPairHigh<Register>();

    Register op2_lo = op2_loc.AsRegisterPairLow<Register>();
    Register op2_hi = op2_loc.AsRegisterPairHigh<Register>();

    // Spare register to compute the subtraction to set condition code.
    Register temp = locations->GetTemp(0).AsRegister<Register>();

    // Subtract off op2_low.
    __ movl(temp, output_lo);
    __ subl(temp, op2_lo);

    // Now use the same temporary and the borrow to finish the subtraction of op2_hi.
    __ movl(temp, output_hi);
    __ sbbl(temp, op2_hi);

    // Now the condition code is correct.
    Condition cond = is_min ? Condition::kGreaterEqual : Condition::kLess;
    __ cmovl(cond, output_lo, op2_lo);
    __ cmovl(cond, output_hi, op2_hi);
  } else {
    Register out = locations->Out().AsRegister<Register>();
    Register op2 = op2_loc.AsRegister<Register>();

    // (out := op1)
    // out <=? op2
    // if out is min jmp done
    // out := op2
    // done:

    __ cmpl(out, op2);
    Condition cond = is_min ? Condition::kGreater : Condition::kLess;
    __ cmovl(cond, out, op2);
  }
}

static void CreateIntIntToIntLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

static void CreateLongLongToLongLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  // Register to use to perform a long subtract to set cc.
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathMinIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMinLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMinLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxIntInt(HInvoke* invoke) {
  CreateIntIntToIntLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxIntInt(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetAssembler());
}

void IntrinsicLocationsBuilderX86::VisitMathMaxLongLong(HInvoke* invoke) {
  CreateLongLongToLongLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathMaxLongLong(HInvoke* invoke) {
  GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetAssembler());
}

static void CreateFPToFPLocations(ArenaAllocator* arena, HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  locations->SetInAt(0, Location::RequiresFpuRegister());
  locations->SetOut(Location::RequiresFpuRegister());
}

void IntrinsicLocationsBuilderX86::VisitMathSqrt(HInvoke* invoke) {
  CreateFPToFPLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSqrt(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();

  GetAssembler()->sqrtsd(out, in);
}

static void InvokeOutOfLineIntrinsic(CodeGeneratorX86* codegen, HInvoke* invoke) {
  MoveArguments(invoke, codegen);

  DCHECK(invoke->IsInvokeStaticOrDirect());
  codegen->GenerateStaticOrDirectCall(invoke->AsInvokeStaticOrDirect(),
                                      Location::RegisterLocation(EAX));
  codegen->RecordPcInfo(invoke, invoke->GetDexPc());

  // Copy the result back to the expected output.
  Location out = invoke->GetLocations()->Out();
  if (out.IsValid()) {
    DCHECK(out.IsRegister());
    codegen->MoveFromReturnRegister(out, invoke->GetType());
  }
}

static void CreateSSE41FPToFPLocations(ArenaAllocator* arena,
                                       HInvoke* invoke,
                                       CodeGeneratorX86* codegen) {
  // Do we have instruction support?
  if (codegen->GetInstructionSetFeatures().HasSSE4_1()) {
    CreateFPToFPLocations(arena, invoke);
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

static void GenSSE41FPToFPIntrinsic(CodeGeneratorX86* codegen,
                                    HInvoke* invoke,
                                    X86Assembler* assembler,
                                    int round_mode) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {
    InvokeOutOfLineIntrinsic(codegen, invoke);
  } else {
    XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
    XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
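    // The roundsd immediate selects the rounding mode: 0 rounds to nearest
    // even (rint), 1 rounds toward negative infinity (floor), and 2 rounds
    // toward positive infinity (ceil), matching the call sites below.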
    __ roundsd(out, in, Immediate(round_mode));
  }
}

void IntrinsicLocationsBuilderX86::VisitMathCeil(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathCeil(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 2);
}

void IntrinsicLocationsBuilderX86::VisitMathFloor(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathFloor(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 1);
}

void IntrinsicLocationsBuilderX86::VisitMathRint(HInvoke* invoke) {
  CreateSSE41FPToFPLocations(arena_, invoke, codegen_);
}

void IntrinsicCodeGeneratorX86::VisitMathRint(HInvoke* invoke) {
  GenSSE41FPToFPIntrinsic(codegen_, invoke, GetAssembler(), 0);
}

void IntrinsicLocationsBuilderX86::VisitMathRoundFloat(HInvoke* invoke) {
  // Do we have instruction support?
  if (codegen_->GetInstructionSetFeatures().HasSSE4_1()) {
    HInvokeStaticOrDirect* static_or_direct = invoke->AsInvokeStaticOrDirect();
    DCHECK(static_or_direct != nullptr);
    LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                              LocationSummary::kNoCall,
                                                              kIntrinsified);
    locations->SetInAt(0, Location::RequiresFpuRegister());
    if (static_or_direct->HasSpecialInput() &&
        invoke->InputAt(
            static_or_direct->GetSpecialInputIndex())->IsX86ComputeBaseMethodAddress()) {
      locations->SetInAt(1, Location::RequiresRegister());
    }
    locations->SetOut(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    locations->AddTemp(Location::RequiresFpuRegister());
    return;
  }

  // We have to fall back to a call to the intrinsic.
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnMainOnly);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::RegisterLocation(EAX));
  // Needs to be EAX for the invoke.
  locations->AddTemp(Location::RegisterLocation(EAX));
}

void IntrinsicCodeGeneratorX86::VisitMathRoundFloat(HInvoke* invoke) {
  LocationSummary* locations = invoke->GetLocations();
  if (locations->WillCall()) {  // TODO: can we reach this?
    InvokeOutOfLineIntrinsic(codegen_, invoke);
    return;
  }

  XmmRegister in = locations->InAt(0).AsFpuRegister<XmmRegister>();
  XmmRegister t1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
  XmmRegister t2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
  Register out = locations->Out().AsRegister<Register>();
  NearLabel skip_incr, done;
  X86Assembler* assembler = GetAssembler();

  // Since no direct x86 rounding instruction matches the required semantics,
  // this intrinsic is implemented as follows:
  //  result = floor(in);
  //  if (in - result >= 0.5f)
  //    result = result + 1.0f;
  __ movss(t2, in);
  __ roundss(t1, in, Immediate(1));
  __ subss(t2, t1);
  if (locations->GetInputCount() == 2 && locations->InAt(1).IsValid()) {
    // Direct constant area available.
    Register constant_area = locations->InAt(1).AsRegister<Register>();
    __ comiss(t2, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(0.5f), constant_area));
    __ j(kBelow, &skip_incr);
    __ addss(t1, codegen_->LiteralInt32Address(bit_cast<int32_t, float>(1.0f), constant_area));
    __ Bind(&skip_incr);
  } else {
    // No constant area: go through stack.
    __ pushl(Immediate(bit_cast<int32_t, float>(0.5f)));
    __ pushl(Immediate(bit_cast<int32_t, float>(1.0f)));
    __ comiss(t2, Address(ESP, 4));
    __ j(kBelow, &skip_incr);
    __ addss(t1, Address(ESP, 0));
    __ Bind(&skip_incr);
    __ addl(ESP, Immediate(8));
  }

  // Final conversion to an integer. Unfortunately this also does not have a
  // direct x86 instruction, since NaN should map to 0 and large positive
  // values need to be clipped to the extreme value.
  __ movl(out, Immediate(kPrimIntMax));
  __ cvtsi2ss(t2, out);
  __ comiss(t1, t2);
  __ j(kAboveEqual, &done);  // clipped to max (already in out), does not jump on unordered
  __ movl(out, Immediate(0));  // does not change flags
  __ j(kUnordered, &done);  // NaN mapped to 0 (just moved in out)
  __ cvttss2si(out, t1);
  __ Bind(&done);
}

static void CreateFPToFPCallLocations(ArenaAllocator* arena,
                                      HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

static void GenFPToFPCall(HInvoke* invoke, CodeGeneratorX86* codegen, QuickEntrypointEnum entry) {
  LocationSummary* locations = invoke->GetLocations();
  DCHECK(locations->WillCall());
  DCHECK(invoke->IsInvokeStaticOrDirect());
  X86Assembler* assembler = codegen->GetAssembler();

  // We need some place to pass the parameters.
  __ subl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(16);

  // Pass the parameters at the bottom of the stack.
  __ movsd(Address(ESP, 0), XMM0);

  // If we have a second parameter, pass it next.
  if (invoke->GetNumberOfArguments() == 2) {
    __ movsd(Address(ESP, 8), XMM1);
  }

  // Now do the actual call.
  codegen->InvokeRuntime(entry, invoke, invoke->GetDexPc());

  // Extract the return value from the FP stack.
  __ fstpl(Address(ESP, 0));
  __ movsd(XMM0, Address(ESP, 0));

  // And clean up the stack.
  __ addl(ESP, Immediate(16));
  __ cfi().AdjustCFAOffset(-16);
}

void IntrinsicLocationsBuilderX86::VisitMathCos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCos);
}

void IntrinsicLocationsBuilderX86::VisitMathSin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSin);
}

void IntrinsicLocationsBuilderX86::VisitMathAcos(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAcos(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAcos);
}

void IntrinsicLocationsBuilderX86::VisitMathAsin(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAsin(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAsin);
}

void IntrinsicLocationsBuilderX86::VisitMathAtan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan);
}

void IntrinsicLocationsBuilderX86::VisitMathCbrt(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCbrt(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCbrt);
}

void IntrinsicLocationsBuilderX86::VisitMathCosh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathCosh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickCosh);
}

void IntrinsicLocationsBuilderX86::VisitMathExp(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExp(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExp);
}

void IntrinsicLocationsBuilderX86::VisitMathExpm1(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathExpm1(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickExpm1);
}

void IntrinsicLocationsBuilderX86::VisitMathLog(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog);
}

void IntrinsicLocationsBuilderX86::VisitMathLog10(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathLog10(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickLog10);
}

void IntrinsicLocationsBuilderX86::VisitMathSinh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathSinh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickSinh);
}

void IntrinsicLocationsBuilderX86::VisitMathTan(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTan(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTan);
}

void IntrinsicLocationsBuilderX86::VisitMathTanh(HInvoke* invoke) {
  CreateFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathTanh(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickTanh);
}

static void CreateFPFPToFPCallLocations(ArenaAllocator* arena,
                                        HInvoke* invoke) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kCallOnMainOnly,
                                                           kIntrinsified);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0)));
  locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1)));
  locations->SetOut(Location::FpuRegisterLocation(XMM0));
}

void IntrinsicLocationsBuilderX86::VisitMathAtan2(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathAtan2(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickAtan2);
}

void IntrinsicLocationsBuilderX86::VisitMathHypot(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathHypot(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickHypot);
}

void IntrinsicLocationsBuilderX86::VisitMathNextAfter(HInvoke* invoke) {
  CreateFPFPToFPCallLocations(arena_, invoke);
}

void IntrinsicCodeGeneratorX86::VisitMathNextAfter(HInvoke* invoke) {
  GenFPToFPCall(invoke, codegen_, kQuickNextAfter);
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  // We need at least two of the positions or length to be an integer constant,
  // or else we won't have enough free registers.
  HIntConstant* src_pos = invoke->InputAt(1)->AsIntConstant();
  HIntConstant* dest_pos = invoke->InputAt(3)->AsIntConstant();
  HIntConstant* length = invoke->InputAt(4)->AsIntConstant();

  int num_constants =
      ((src_pos != nullptr) ? 1 : 0)
      + ((dest_pos != nullptr) ? 1 : 0)
      + ((length != nullptr) ? 1 : 0);

  if (num_constants < 2) {
    // Not enough free registers.
    return;
  }

  // As long as we are checking, we might as well check to see if the src and dest
  // positions are >= 0.
  if ((src_pos != nullptr && src_pos->GetValue() < 0) ||
      (dest_pos != nullptr && dest_pos->GetValue() < 0)) {
    // We will have to fail anyway.
    return;
  }

  // And since we are already checking, check the length too.
  if (length != nullptr) {
    int32_t len = length->GetValue();
    if (len < 0) {
      // Just call as normal.
      return;
    }
  }

  // Okay, it is safe to generate inline code.
  LocationSummary* locations =
      new (arena_) LocationSummary(invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
  // arraycopy(Object src, int srcPos, Object dest, int destPos, int length).
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
  locations->SetInAt(2, Location::RequiresRegister());
  locations->SetInAt(3, Location::RegisterOrConstant(invoke->InputAt(3)));
  locations->SetInAt(4, Location::RegisterOrConstant(invoke->InputAt(4)));

  // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
  locations->AddTemp(Location::RegisterLocation(ESI));
  locations->AddTemp(Location::RegisterLocation(EDI));
  locations->AddTemp(Location::RegisterLocation(ECX));
}

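// Emits the bounds checks for one side of the copy: pos must be non-negative,
// no larger than length(input), and length(input) - pos must cover the number
// of elements to copy; any failure jumps to the shared slow path.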
static void CheckPosition(X86Assembler* assembler,
                          Location pos,
                          Register input,
                          Location length,
                          SlowPathCode* slow_path,
                          Register temp,
                          bool length_is_input_length = false) {
  // Where is the length in the Array?
  const uint32_t length_offset = mirror::Array::LengthOffset().Uint32Value();

  if (pos.IsConstant()) {
    int32_t pos_const = pos.GetConstant()->AsIntConstant()->GetValue();
    if (pos_const == 0) {
      if (!length_is_input_length) {
        // Check that length(input) >= length.
        if (length.IsConstant()) {
          __ cmpl(Address(input, length_offset),
                  Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
        } else {
          __ cmpl(Address(input, length_offset), length.AsRegister<Register>());
        }
        __ j(kLess, slow_path->GetEntryLabel());
      }
    } else {
      // Check that length(input) >= pos.
      __ movl(temp, Address(input, length_offset));
      __ subl(temp, Immediate(pos_const));
      __ j(kLess, slow_path->GetEntryLabel());

      // Check that (length(input) - pos) >= length.
      if (length.IsConstant()) {
        __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ cmpl(temp, length.AsRegister<Register>());
      }
      __ j(kLess, slow_path->GetEntryLabel());
    }
  } else if (length_is_input_length) {
    // The only way the copy can succeed is if pos is zero.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    // Check that pos >= 0.
    Register pos_reg = pos.AsRegister<Register>();
    __ testl(pos_reg, pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that pos <= length(input).
    __ cmpl(Address(input, length_offset), pos_reg);
    __ j(kLess, slow_path->GetEntryLabel());

    // Check that (length(input) - pos) >= length.
    __ movl(temp, Address(input, length_offset));
    __ subl(temp, pos_reg);
    if (length.IsConstant()) {
      __ cmpl(temp, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(temp, length.AsRegister<Register>());
    }
    __ j(kLess, slow_path->GetEntryLabel());
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopyChar(HInvoke* invoke) {
  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location srcPos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location destPos = locations->InAt(3);
  Location length = locations->InAt(4);

  // Temporaries that we need for MOVSW.
  Register src_base = locations->GetTemp(0).AsRegister<Register>();
  DCHECK_EQ(src_base, ESI);
  Register dest_base = locations->GetTemp(1).AsRegister<Register>();
  DCHECK_EQ(dest_base, EDI);
  Register count = locations->GetTemp(2).AsRegister<Register>();
  DCHECK_EQ(count, ECX);

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Bail out if the source and destination are the same (to handle overlap).
  __ cmpl(src, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the source is null.
  __ testl(src, src);
  __ j(kEqual, slow_path->GetEntryLabel());

  // Bail out if the destination is null.
  __ testl(dest, dest);
  __ j(kEqual, slow_path->GetEntryLabel());

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // We need the count in ECX.
  if (length.IsConstant()) {
    __ movl(count, Immediate(length.GetConstant()->AsIntConstant()->GetValue()));
  } else {
    __ movl(count, length.AsRegister<Register>());
  }

  // Validity checks: source. Use src_base as a temporary register.
  CheckPosition(assembler, srcPos, src, Location::RegisterLocation(count), slow_path, src_base);

  // Validity checks: dest. Use src_base as a temporary register.
  CheckPosition(assembler, destPos, dest, Location::RegisterLocation(count), slow_path, src_base);

  // Okay, everything checks out. Finally time to do the copy.
  // Check assumption that sizeof(Char) is 2 (used in scaling below).
  const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
  DCHECK_EQ(char_size, 2u);

  const uint32_t data_offset = mirror::Array::DataOffset(char_size).Uint32Value();

  if (srcPos.IsConstant()) {
    int32_t srcPos_const = srcPos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(src_base, Address(src, char_size * srcPos_const + data_offset));
  } else {
    __ leal(src_base, Address(src, srcPos.AsRegister<Register>(),
                              ScaleFactor::TIMES_2, data_offset));
  }
  if (destPos.IsConstant()) {
    int32_t destPos_const = destPos.GetConstant()->AsIntConstant()->GetValue();

    __ leal(dest_base, Address(dest, char_size * destPos_const + data_offset));
  } else {
    __ leal(dest_base, Address(dest, destPos.AsRegister<Register>(),
                               ScaleFactor::TIMES_2, data_offset));
  }

  // Do the move.
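  // REP MOVSW copies ECX 16-bit elements from [ESI] to [EDI], advancing both
  // pointers, which is why the temporaries were pinned to ESI/EDI/ECX in the
  // LocationsBuilder.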
1315 __ rep_movsw();
1316
1317 __ Bind(slow_path->GetExitLabel());
1318}
1319
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001320void IntrinsicLocationsBuilderX86::VisitStringCompareTo(HInvoke* invoke) {
1321 // The inputs plus one temp.
1322 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001323 LocationSummary::kCallOnMainAndSlowPath,
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001324 kIntrinsified);
1325 InvokeRuntimeCallingConvention calling_convention;
1326 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1327 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1328 locations->SetOut(Location::RegisterLocation(EAX));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001329}
1330
1331void IntrinsicCodeGeneratorX86::VisitStringCompareTo(HInvoke* invoke) {
1332 X86Assembler* assembler = GetAssembler();
1333 LocationSummary* locations = invoke->GetLocations();
1334
Nicolas Geoffray512e04d2015-03-27 17:21:24 +00001335 // Note that the null check must have been done earlier.
Calin Juravle641547a2015-04-21 22:08:51 +01001336 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001337
1338 Register argument = locations->InAt(1).AsRegister<Register>();
1339 __ testl(argument, argument);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001340 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001341 codegen_->AddSlowPath(slow_path);
1342 __ j(kEqual, slow_path->GetEntryLabel());
1343
Serban Constantinescuba45db02016-07-12 22:53:02 +01001344 codegen_->InvokeRuntime(kQuickStringCompareTo, invoke, invoke->GetDexPc(), slow_path);
Nicolas Geoffrayd75948a2015-03-27 09:53:16 +00001345 __ Bind(slow_path->GetExitLabel());
1346}
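// Note on the pattern above (illustrative, not emitted code): only the non-null argument
// case reaches the kQuickStringCompareTo runtime call. A null argument branches to
// IntrinsicSlowPathX86, which (behavior defined elsewhere) re-issues the original invoke,
// so the NullPointerException required by String.compareTo is still thrown. Roughly:
//
//   if (arg == null)  -> slow path -> regular invoke throws NPE
//   else              -> kQuickStringCompareTo(str, arg)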
1347
Agi Csakid7138c82015-08-13 17:46:44 -07001348void IntrinsicLocationsBuilderX86::VisitStringEquals(HInvoke* invoke) {
1349 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1350 LocationSummary::kNoCall,
1351 kIntrinsified);
1352 locations->SetInAt(0, Location::RequiresRegister());
1353 locations->SetInAt(1, Location::RequiresRegister());
1354
1355 // Request temporary registers, ECX and EDI needed for repe_cmpsl instruction.
1356 locations->AddTemp(Location::RegisterLocation(ECX));
1357 locations->AddTemp(Location::RegisterLocation(EDI));
1358
1359 // Set output, ESI needed for repe_cmpsl instruction anyways.
1360 locations->SetOut(Location::RegisterLocation(ESI), Location::kOutputOverlap);
1361}
1362
1363void IntrinsicCodeGeneratorX86::VisitStringEquals(HInvoke* invoke) {
1364 X86Assembler* assembler = GetAssembler();
1365 LocationSummary* locations = invoke->GetLocations();
1366
1367 Register str = locations->InAt(0).AsRegister<Register>();
1368 Register arg = locations->InAt(1).AsRegister<Register>();
1369 Register ecx = locations->GetTemp(0).AsRegister<Register>();
1370 Register edi = locations->GetTemp(1).AsRegister<Register>();
1371 Register esi = locations->Out().AsRegister<Register>();
1372
Mark Mendell0c9497d2015-08-21 09:30:05 -04001373 NearLabel end, return_true, return_false;
Agi Csakid7138c82015-08-13 17:46:44 -07001374
1375 // Get offsets of count, value, and class fields within a string object.
1376 const uint32_t count_offset = mirror::String::CountOffset().Uint32Value();
1377 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1378 const uint32_t class_offset = mirror::Object::ClassOffset().Uint32Value();
1379
1380 // Note that the null check must have been done earlier.
1381 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1382
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001383 StringEqualsOptimizations optimizations(invoke);
1384 if (!optimizations.GetArgumentNotNull()) {
1385 // Check if input is null, return false if it is.
1386 __ testl(arg, arg);
1387 __ j(kEqual, &return_false);
1388 }
Agi Csakid7138c82015-08-13 17:46:44 -07001389
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001390 if (!optimizations.GetArgumentIsString()) {
Vladimir Marko53b52002016-05-24 19:30:45 +01001391 // Instanceof check for the argument by comparing class fields.
1392 // All string objects must have the same type since String cannot be subclassed.
1393 // Receiver must be a string object, so its class field is equal to all strings' class fields.
1394 // If the argument is a string object, its class field must be equal to receiver's class field.
Nicolas Geoffraya83a54d2015-10-02 17:30:26 +01001395 __ movl(ecx, Address(str, class_offset));
1396 __ cmpl(ecx, Address(arg, class_offset));
1397 __ j(kNotEqual, &return_false);
1398 }
Agi Csakid7138c82015-08-13 17:46:44 -07001399
1400 // Reference equality check, return true if same reference.
1401 __ cmpl(str, arg);
1402 __ j(kEqual, &return_true);
1403
1404 // Load length of receiver string.
1405 __ movl(ecx, Address(str, count_offset));
1406 // Check if lengths are equal, return false if they're not.
1407 __ cmpl(ecx, Address(arg, count_offset));
1408 __ j(kNotEqual, &return_false);
1409 // Return true if both strings are empty.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001410 __ jecxz(&return_true);
Agi Csakid7138c82015-08-13 17:46:44 -07001411
1412 // Load starting addresses of string values into ESI/EDI as required for repe_cmpsl instruction.
1413 __ leal(esi, Address(str, value_offset));
1414 __ leal(edi, Address(arg, value_offset));
1415
1416 // Divide string length by 2 to compare characters 2 at a time and adjust for odd lengths.
1417 __ addl(ecx, Immediate(1));
1418 __ shrl(ecx, Immediate(1));
1419
1420 // Assertions that must hold in order to compare strings 2 characters at a time.
1421 DCHECK_ALIGNED(value_offset, 4);
1422 static_assert(IsAligned<4>(kObjectAlignment), "String of odd length is not zero padded");
1423
1424 // Loop to compare strings two characters at a time starting at the beginning of the string.
1425 __ repe_cmpsl();
1426 // If strings are not equal, zero flag will be cleared.
1427 __ j(kNotEqual, &return_false);
1428
1429 // Return true and exit the function.
1430 // If loop does not result in returning false, we return true.
1431 __ Bind(&return_true);
1432 __ movl(esi, Immediate(1));
1433 __ jmp(&end);
1434
1435 // Return false and exit the function.
1436 __ Bind(&return_false);
1437 __ xorl(esi, esi);
1438 __ Bind(&end);
1439}
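// Worked example of the word-wise compare above (illustrative only): for two strings of
// length 5, ECX = (5 + 1) / 2 = 3, so REPE CMPSL compares three 32-bit words, i.e. six
// chars. The sixth char is padding covered by the alignment static_assert above, so it is
// zero on both sides and cannot flip the result. In C-like terms:
//
//   words = (count + 1) / 2;
//   for (i = 0; i < words && *lhs32 == *rhs32; ++i, ++lhs32, ++rhs32) { }
//   equal = (i == words);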
1440
Andreas Gampe21030dd2015-05-07 14:46:15 -07001441static void CreateStringIndexOfLocations(HInvoke* invoke,
1442 ArenaAllocator* allocator,
1443 bool start_at_zero) {
1444 LocationSummary* locations = new (allocator) LocationSummary(invoke,
1445 LocationSummary::kCallOnSlowPath,
1446 kIntrinsified);
1447 // The data needs to be in EDI for scasw. So request that the string is there, anyways.
1448 locations->SetInAt(0, Location::RegisterLocation(EDI));
1449 // If we look for a constant char, we'll still have to copy it into EAX. So just request the
1450 // allocator to do that, anyways. We can still do the constant check by checking the parameter
1451 // of the instruction explicitly.
1452 // Note: This works as we don't clobber EAX anywhere.
1453 locations->SetInAt(1, Location::RegisterLocation(EAX));
1454 if (!start_at_zero) {
1455 locations->SetInAt(2, Location::RequiresRegister()); // The starting index.
1456 }
1457 // As we clobber EDI during execution anyways, also use it as the output.
1458 locations->SetOut(Location::SameAsFirstInput());
1459
1460 // repne scasw uses ECX as the counter.
1461 locations->AddTemp(Location::RegisterLocation(ECX));
1462 // Need another temporary to be able to compute the result.
1463 locations->AddTemp(Location::RequiresRegister());
1464}
1465
1466static void GenerateStringIndexOf(HInvoke* invoke,
1467 X86Assembler* assembler,
1468 CodeGeneratorX86* codegen,
1469 ArenaAllocator* allocator,
1470 bool start_at_zero) {
1471 LocationSummary* locations = invoke->GetLocations();
1472
1473 // Note that the null check must have been done earlier.
1474 DCHECK(!invoke->CanDoImplicitNullCheckOn(invoke->InputAt(0)));
1475
1476 Register string_obj = locations->InAt(0).AsRegister<Register>();
1477 Register search_value = locations->InAt(1).AsRegister<Register>();
1478 Register counter = locations->GetTemp(0).AsRegister<Register>();
1479 Register string_length = locations->GetTemp(1).AsRegister<Register>();
1480 Register out = locations->Out().AsRegister<Register>();
1481
1482 // Check our assumptions for registers.
1483 DCHECK_EQ(string_obj, EDI);
1484 DCHECK_EQ(search_value, EAX);
1485 DCHECK_EQ(counter, ECX);
1486 DCHECK_EQ(out, EDI);
1487
1488 // Check for code points > 0xFFFF. Either a slow-path check when we don't know statically,
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001489 // or directly dispatch for a large constant, or omit slow-path for a small constant or a char.
Andreas Gampe85b62f22015-09-09 13:15:38 -07001490 SlowPathCode* slow_path = nullptr;
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001491 HInstruction* code_point = invoke->InputAt(1);
1492 if (code_point->IsIntConstant()) {
Vladimir Markoda051082016-05-17 16:10:20 +01001493 if (static_cast<uint32_t>(code_point->AsIntConstant()->GetValue()) >
Andreas Gampe21030dd2015-05-07 14:46:15 -07001494 std::numeric_limits<uint16_t>::max()) {
1495 // Always needs the slow-path. We could directly dispatch to it, but this case should be
1496 // rare, so for simplicity just put the full slow-path down and branch unconditionally.
1497 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1498 codegen->AddSlowPath(slow_path);
1499 __ jmp(slow_path->GetEntryLabel());
1500 __ Bind(slow_path->GetExitLabel());
1501 return;
1502 }
Vladimir Markofb6c90a2016-05-06 15:52:12 +01001503 } else if (code_point->GetType() != Primitive::kPrimChar) {
Andreas Gampe21030dd2015-05-07 14:46:15 -07001504 __ cmpl(search_value, Immediate(std::numeric_limits<uint16_t>::max()));
1505 slow_path = new (allocator) IntrinsicSlowPathX86(invoke);
1506 codegen->AddSlowPath(slow_path);
1507 __ j(kAbove, slow_path->GetEntryLabel());
1508 }
1509
1510 // From here down, we know that we are looking for a char that fits in 16 bits.
1511 // Location of reference to data array within the String object.
1512 int32_t value_offset = mirror::String::ValueOffset().Int32Value();
1513 // Location of count within the String object.
1514 int32_t count_offset = mirror::String::CountOffset().Int32Value();
1515
1516 // Load string length, i.e., the count field of the string.
1517 __ movl(string_length, Address(string_obj, count_offset));
1518
1519 // Do a zero-length check.
1520 // TODO: Support jecxz.
Mark Mendell0c9497d2015-08-21 09:30:05 -04001521 NearLabel not_found_label;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001522 __ testl(string_length, string_length);
1523 __ j(kEqual, &not_found_label);
1524
1525 if (start_at_zero) {
1526 // Number of chars to scan is the same as the string length.
1527 __ movl(counter, string_length);
1528
1529 // Move to the start of the string.
1530 __ addl(string_obj, Immediate(value_offset));
1531 } else {
1532 Register start_index = locations->InAt(2).AsRegister<Register>();
1533
1534 // Do a start_index check.
1535 __ cmpl(start_index, string_length);
1536 __ j(kGreaterEqual, &not_found_label);
1537
1538    // Ensure we have a start index >= 0.
1539 __ xorl(counter, counter);
1540 __ cmpl(start_index, Immediate(0));
1541 __ cmovl(kGreater, counter, start_index);
1542
1543 // Move to the start of the string: string_obj + value_offset + 2 * start_index.
1544 __ leal(string_obj, Address(string_obj, counter, ScaleFactor::TIMES_2, value_offset));
1545
1546 // Now update ecx (the repne scasw work counter). We have string.length - start_index left to
1547 // compare.
1548 __ negl(counter);
1549 __ leal(counter, Address(string_length, counter, ScaleFactor::TIMES_1, 0));
1550 }
1551
1552 // Everything is set up for repne scasw:
1553 // * Comparison address in EDI.
1554 // * Counter in ECX.
1555 __ repne_scasw();
1556
1557 // Did we find a match?
1558 __ j(kNotEqual, &not_found_label);
1559
1560 // Yes, we matched. Compute the index of the result.
1561 __ subl(string_length, counter);
1562 __ leal(out, Address(string_length, -1));
1563
Mark Mendell0c9497d2015-08-21 09:30:05 -04001564 NearLabel done;
Andreas Gampe21030dd2015-05-07 14:46:15 -07001565 __ jmp(&done);
1566
1567 // Failed to match; return -1.
1568 __ Bind(&not_found_label);
1569 __ movl(out, Immediate(-1));
1570
1571 // And join up at the end.
1572 __ Bind(&done);
1573 if (slow_path != nullptr) {
1574 __ Bind(slow_path->GetExitLabel());
1575 }
1576}
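// Worked example of the index computation above (illustrative only): searching from
// index 0 in a string of length 8, REPNE SCASW decrements ECX once per examined char and
// stops on a match. A hit at index 2 leaves ECX = 5, so
//
//   string_length - counter = 8 - 5 = 3   // chars examined
//   out = 3 - 1 = 2                       // index of the match
//
// With a non-zero start index the same arithmetic works because string_length is left
// untouched while counter starts at string_length - start_index.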
1577
1578void IntrinsicLocationsBuilderX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001579 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001580}
1581
1582void IntrinsicCodeGeneratorX86::VisitStringIndexOf(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001583 GenerateStringIndexOf(invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001584}
1585
1586void IntrinsicLocationsBuilderX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001587 CreateStringIndexOfLocations(invoke, arena_, /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001588}
1589
1590void IntrinsicCodeGeneratorX86::VisitStringIndexOfAfter(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00001591 GenerateStringIndexOf(
1592 invoke, GetAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false);
Andreas Gampe21030dd2015-05-07 14:46:15 -07001593}
1594
Jeff Hao848f70a2014-01-15 13:49:50 -08001595void IntrinsicLocationsBuilderX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1596 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001597 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001598 kIntrinsified);
1599 InvokeRuntimeCallingConvention calling_convention;
1600 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1601 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1602 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1603 locations->SetInAt(3, Location::RegisterLocation(calling_convention.GetRegisterAt(3)));
1604 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001605}
1606
1607void IntrinsicCodeGeneratorX86::VisitStringNewStringFromBytes(HInvoke* invoke) {
1608 X86Assembler* assembler = GetAssembler();
1609 LocationSummary* locations = invoke->GetLocations();
1610
1611 Register byte_array = locations->InAt(0).AsRegister<Register>();
1612 __ testl(byte_array, byte_array);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001613 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001614 codegen_->AddSlowPath(slow_path);
1615 __ j(kEqual, slow_path->GetEntryLabel());
1616
Serban Constantinescuba45db02016-07-12 22:53:02 +01001617 codegen_->InvokeRuntime(kQuickAllocStringFromBytes, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001618 CheckEntrypointTypes<kQuickAllocStringFromBytes, void*, void*, int32_t, int32_t, int32_t>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001619 __ Bind(slow_path->GetExitLabel());
1620}
1621
1622void IntrinsicLocationsBuilderX86::VisitStringNewStringFromChars(HInvoke* invoke) {
1623 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu54ff4822016-07-07 18:03:19 +01001624 LocationSummary::kCallOnMainOnly,
Jeff Hao848f70a2014-01-15 13:49:50 -08001625 kIntrinsified);
1626 InvokeRuntimeCallingConvention calling_convention;
1627 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1628 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1629 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1630 locations->SetOut(Location::RegisterLocation(EAX));
1631}
1632
1633void IntrinsicCodeGeneratorX86::VisitStringNewStringFromChars(HInvoke* invoke) {
Roland Levillaincc3839c2016-02-29 16:23:48 +00001634 // No need to emit code checking whether `locations->InAt(2)` is a null
1635 // pointer, as callers of the native method
1636 //
1637 // java.lang.StringFactory.newStringFromChars(int offset, int charCount, char[] data)
1638 //
1639 // all include a null check on `data` before calling that method.
Serban Constantinescuba45db02016-07-12 22:53:02 +01001640 codegen_->InvokeRuntime(kQuickAllocStringFromChars, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001641 CheckEntrypointTypes<kQuickAllocStringFromChars, void*, int32_t, int32_t, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001642}
1643
1644void IntrinsicLocationsBuilderX86::VisitStringNewStringFromString(HInvoke* invoke) {
1645 LocationSummary* locations = new (arena_) LocationSummary(invoke,
Serban Constantinescu806f0122016-03-09 11:10:16 +00001646 LocationSummary::kCallOnMainAndSlowPath,
Jeff Hao848f70a2014-01-15 13:49:50 -08001647 kIntrinsified);
1648 InvokeRuntimeCallingConvention calling_convention;
1649 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1650 locations->SetOut(Location::RegisterLocation(EAX));
Jeff Hao848f70a2014-01-15 13:49:50 -08001651}
1652
1653void IntrinsicCodeGeneratorX86::VisitStringNewStringFromString(HInvoke* invoke) {
1654 X86Assembler* assembler = GetAssembler();
1655 LocationSummary* locations = invoke->GetLocations();
1656
1657 Register string_to_copy = locations->InAt(0).AsRegister<Register>();
1658 __ testl(string_to_copy, string_to_copy);
Andreas Gampe85b62f22015-09-09 13:15:38 -07001659 SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
Jeff Hao848f70a2014-01-15 13:49:50 -08001660 codegen_->AddSlowPath(slow_path);
1661 __ j(kEqual, slow_path->GetEntryLabel());
1662
Serban Constantinescuba45db02016-07-12 22:53:02 +01001663 codegen_->InvokeRuntime(kQuickAllocStringFromString, invoke, invoke->GetDexPc());
Roland Levillainf969a202016-03-09 16:14:00 +00001664 CheckEntrypointTypes<kQuickAllocStringFromString, void*, void*>();
Jeff Hao848f70a2014-01-15 13:49:50 -08001665 __ Bind(slow_path->GetExitLabel());
1666}
1667
Mark Mendell8f8926a2015-08-17 11:39:06 -04001668void IntrinsicLocationsBuilderX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1669 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1670 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1671 LocationSummary::kNoCall,
1672 kIntrinsified);
1673 locations->SetInAt(0, Location::RequiresRegister());
1674 locations->SetInAt(1, Location::RegisterOrConstant(invoke->InputAt(1)));
1675 // Place srcEnd in ECX to save a move below.
1676 locations->SetInAt(2, Location::RegisterLocation(ECX));
1677 locations->SetInAt(3, Location::RequiresRegister());
1678 locations->SetInAt(4, Location::RequiresRegister());
1679
1680 // And we need some temporaries. We will use REP MOVSW, so we need fixed registers.
1681 // We don't have enough registers to also grab ECX, so handle below.
1682 locations->AddTemp(Location::RegisterLocation(ESI));
1683 locations->AddTemp(Location::RegisterLocation(EDI));
1684}
1685
1686void IntrinsicCodeGeneratorX86::VisitStringGetCharsNoCheck(HInvoke* invoke) {
1687 X86Assembler* assembler = GetAssembler();
1688 LocationSummary* locations = invoke->GetLocations();
1689
1690 size_t char_component_size = Primitive::ComponentSize(Primitive::kPrimChar);
1691 // Location of data in char array buffer.
1692 const uint32_t data_offset = mirror::Array::DataOffset(char_component_size).Uint32Value();
1693 // Location of char array data in string.
1694 const uint32_t value_offset = mirror::String::ValueOffset().Uint32Value();
1695
1696 // public void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin);
1697 Register obj = locations->InAt(0).AsRegister<Register>();
1698 Location srcBegin = locations->InAt(1);
1699 int srcBegin_value =
1700 srcBegin.IsConstant() ? srcBegin.GetConstant()->AsIntConstant()->GetValue() : 0;
1701 Register srcEnd = locations->InAt(2).AsRegister<Register>();
1702 Register dst = locations->InAt(3).AsRegister<Register>();
1703 Register dstBegin = locations->InAt(4).AsRegister<Register>();
1704
1705 // Check assumption that sizeof(Char) is 2 (used in scaling below).
1706 const size_t char_size = Primitive::ComponentSize(Primitive::kPrimChar);
1707 DCHECK_EQ(char_size, 2u);
1708
1709 // Compute the address of the destination buffer.
1710 __ leal(EDI, Address(dst, dstBegin, ScaleFactor::TIMES_2, data_offset));
1711
1712 // Compute the address of the source string.
1713 if (srcBegin.IsConstant()) {
1714 // Compute the address of the source string by adding the number of chars from
1715 // the source beginning to the value offset of a string.
1716 __ leal(ESI, Address(obj, srcBegin_value * char_size + value_offset));
1717 } else {
1718 __ leal(ESI, Address(obj, srcBegin.AsRegister<Register>(),
1719 ScaleFactor::TIMES_2, value_offset));
1720 }
1721
1722 // Compute the number of chars (words) to move.
1723 // Now is the time to save ECX, since we don't know if it will be used later.
1724 __ pushl(ECX);
1725 int stack_adjust = kX86WordSize;
1726 __ cfi().AdjustCFAOffset(stack_adjust);
1727 DCHECK_EQ(srcEnd, ECX);
1728 if (srcBegin.IsConstant()) {
1729 if (srcBegin_value != 0) {
1730 __ subl(ECX, Immediate(srcBegin_value));
1731 }
1732 } else {
1733 DCHECK(srcBegin.IsRegister());
1734 __ subl(ECX, srcBegin.AsRegister<Register>());
1735 }
1736
1737 // Do the move.
1738 __ rep_movsw();
1739
1740 // And restore ECX.
1741 __ popl(ECX);
1742 __ cfi().AdjustCFAOffset(-stack_adjust);
1743}
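// Semantically (an illustrative sketch, not emitted code) the sequence above performs
//
//   for (int i = 0; i < srcEnd - srcBegin; ++i) {
//     dst[dstBegin + i] = str.charAt(srcBegin + i);   // chars read straight from value_offset
//   }
//
// collapsed into a single REP MOVSW: ESI holds the source char address, EDI the
// destination address, and ECX the char count. ECX is spilled around the copy because it
// arrives holding srcEnd and we do not know whether that register is still needed later.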
1744
Mark Mendell09ed1a32015-03-25 08:30:06 -04001745static void GenPeek(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1746 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1747 Location out_loc = locations->Out();
1748 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1749 // to avoid a SIGBUS.
1750 switch (size) {
1751 case Primitive::kPrimByte:
1752 __ movsxb(out_loc.AsRegister<Register>(), Address(address, 0));
1753 break;
1754 case Primitive::kPrimShort:
1755 __ movsxw(out_loc.AsRegister<Register>(), Address(address, 0));
1756 break;
1757 case Primitive::kPrimInt:
1758 __ movl(out_loc.AsRegister<Register>(), Address(address, 0));
1759 break;
1760 case Primitive::kPrimLong:
1761 __ movl(out_loc.AsRegisterPairLow<Register>(), Address(address, 0));
1762 __ movl(out_loc.AsRegisterPairHigh<Register>(), Address(address, 4));
1763 break;
1764 default:
1765 LOG(FATAL) << "Type not recognized for peek: " << size;
1766 UNREACHABLE();
1767 }
1768}
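// Illustrative C equivalent of the peeks above (names are made up, not emitted code): the
// address arrives as a Java long but only its low 32 bits are used on x86-32, and each
// access is a plain load, e.g.
//
//   int8_t  peekByte(intptr_t addr) { return *reinterpret_cast<int8_t*>(addr); }
//   int32_t peekInt(intptr_t addr)  { return *reinterpret_cast<int32_t*>(addr); }
//
// The long variant is assembled from two 32-bit loads, so it is not a single atomic
// 64-bit access.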
1769
1770void IntrinsicLocationsBuilderX86::VisitMemoryPeekByte(HInvoke* invoke) {
1771 CreateLongToIntLocations(arena_, invoke);
1772}
1773
1774void IntrinsicCodeGeneratorX86::VisitMemoryPeekByte(HInvoke* invoke) {
1775 GenPeek(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1776}
1777
1778void IntrinsicLocationsBuilderX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1779 CreateLongToIntLocations(arena_, invoke);
1780}
1781
1782void IntrinsicCodeGeneratorX86::VisitMemoryPeekIntNative(HInvoke* invoke) {
1783 GenPeek(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1784}
1785
1786void IntrinsicLocationsBuilderX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1787 CreateLongToLongLocations(arena_, invoke);
1788}
1789
1790void IntrinsicCodeGeneratorX86::VisitMemoryPeekLongNative(HInvoke* invoke) {
1791 GenPeek(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1792}
1793
1794void IntrinsicLocationsBuilderX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1795 CreateLongToIntLocations(arena_, invoke);
1796}
1797
1798void IntrinsicCodeGeneratorX86::VisitMemoryPeekShortNative(HInvoke* invoke) {
1799 GenPeek(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1800}
1801
1802static void CreateLongIntToVoidLocations(ArenaAllocator* arena, Primitive::Type size,
1803 HInvoke* invoke) {
1804 LocationSummary* locations = new (arena) LocationSummary(invoke,
1805 LocationSummary::kNoCall,
1806 kIntrinsified);
1807 locations->SetInAt(0, Location::RequiresRegister());
Roland Levillain4c0eb422015-04-24 16:43:49 +01001808 HInstruction* value = invoke->InputAt(1);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001809 if (size == Primitive::kPrimByte) {
1810 locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
1811 } else {
1812 locations->SetInAt(1, Location::RegisterOrConstant(value));
1813 }
1814}
1815
1816static void GenPoke(LocationSummary* locations, Primitive::Type size, X86Assembler* assembler) {
1817 Register address = locations->InAt(0).AsRegisterPairLow<Register>();
1818 Location value_loc = locations->InAt(1);
1819 // x86 allows unaligned access. We do not have to check the input or use specific instructions
1820 // to avoid a SIGBUS.
1821 switch (size) {
1822 case Primitive::kPrimByte:
1823 if (value_loc.IsConstant()) {
1824 __ movb(Address(address, 0),
1825 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1826 } else {
1827 __ movb(Address(address, 0), value_loc.AsRegister<ByteRegister>());
1828 }
1829 break;
1830 case Primitive::kPrimShort:
1831 if (value_loc.IsConstant()) {
1832 __ movw(Address(address, 0),
1833 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1834 } else {
1835 __ movw(Address(address, 0), value_loc.AsRegister<Register>());
1836 }
1837 break;
1838 case Primitive::kPrimInt:
1839 if (value_loc.IsConstant()) {
1840 __ movl(Address(address, 0),
1841 Immediate(value_loc.GetConstant()->AsIntConstant()->GetValue()));
1842 } else {
1843 __ movl(Address(address, 0), value_loc.AsRegister<Register>());
1844 }
1845 break;
1846 case Primitive::kPrimLong:
1847 if (value_loc.IsConstant()) {
1848 int64_t value = value_loc.GetConstant()->AsLongConstant()->GetValue();
1849 __ movl(Address(address, 0), Immediate(Low32Bits(value)));
1850 __ movl(Address(address, 4), Immediate(High32Bits(value)));
1851 } else {
1852 __ movl(Address(address, 0), value_loc.AsRegisterPairLow<Register>());
1853 __ movl(Address(address, 4), value_loc.AsRegisterPairHigh<Register>());
1854 }
1855 break;
1856 default:
1857 LOG(FATAL) << "Type not recognized for poke: " << size;
1858 UNREACHABLE();
1859 }
1860}
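// The pokes mirror the peeks: one store of the requested width (two 32-bit stores for the
// long case), with constants folded directly into the store. Roughly (illustrative only):
//
//   mov byte  ptr [addr], imm8    ; pokeByte with a constant value
//   mov dword ptr [addr], reg     ; pokeIntNative with a register value
//
// The exact form depends on whether the value location is a constant or a register, as
// handled case by case above.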
1861
1862void IntrinsicLocationsBuilderX86::VisitMemoryPokeByte(HInvoke* invoke) {
1863 CreateLongIntToVoidLocations(arena_, Primitive::kPrimByte, invoke);
1864}
1865
1866void IntrinsicCodeGeneratorX86::VisitMemoryPokeByte(HInvoke* invoke) {
1867 GenPoke(invoke->GetLocations(), Primitive::kPrimByte, GetAssembler());
1868}
1869
1870void IntrinsicLocationsBuilderX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1871 CreateLongIntToVoidLocations(arena_, Primitive::kPrimInt, invoke);
1872}
1873
1874void IntrinsicCodeGeneratorX86::VisitMemoryPokeIntNative(HInvoke* invoke) {
1875 GenPoke(invoke->GetLocations(), Primitive::kPrimInt, GetAssembler());
1876}
1877
1878void IntrinsicLocationsBuilderX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1879 CreateLongIntToVoidLocations(arena_, Primitive::kPrimLong, invoke);
1880}
1881
1882void IntrinsicCodeGeneratorX86::VisitMemoryPokeLongNative(HInvoke* invoke) {
1883 GenPoke(invoke->GetLocations(), Primitive::kPrimLong, GetAssembler());
1884}
1885
1886void IntrinsicLocationsBuilderX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1887 CreateLongIntToVoidLocations(arena_, Primitive::kPrimShort, invoke);
1888}
1889
1890void IntrinsicCodeGeneratorX86::VisitMemoryPokeShortNative(HInvoke* invoke) {
1891 GenPoke(invoke->GetLocations(), Primitive::kPrimShort, GetAssembler());
1892}
1893
1894void IntrinsicLocationsBuilderX86::VisitThreadCurrentThread(HInvoke* invoke) {
1895 LocationSummary* locations = new (arena_) LocationSummary(invoke,
1896 LocationSummary::kNoCall,
1897 kIntrinsified);
1898 locations->SetOut(Location::RequiresRegister());
1899}
1900
1901void IntrinsicCodeGeneratorX86::VisitThreadCurrentThread(HInvoke* invoke) {
1902 Register out = invoke->GetLocations()->Out().AsRegister<Register>();
Andreas Gampe542451c2016-07-26 09:02:02 -07001903 GetAssembler()->fs()->movl(out, Address::Absolute(Thread::PeerOffset<kX86PointerSize>()));
Mark Mendell09ed1a32015-03-25 08:30:06 -04001904}
1905
Roland Levillain0d5a2812015-11-13 10:07:31 +00001906static void GenUnsafeGet(HInvoke* invoke,
1907 Primitive::Type type,
1908 bool is_volatile,
1909 CodeGeneratorX86* codegen) {
1910 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
1911 LocationSummary* locations = invoke->GetLocations();
1912 Location base_loc = locations->InAt(1);
1913 Register base = base_loc.AsRegister<Register>();
1914 Location offset_loc = locations->InAt(2);
1915 Register offset = offset_loc.AsRegisterPairLow<Register>();
1916 Location output_loc = locations->Out();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001917
1918 switch (type) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00001919 case Primitive::kPrimInt: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001920 Register output = output_loc.AsRegister<Register>();
1921 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
Roland Levillain7c1559a2015-12-15 10:55:36 +00001922 break;
1923 }
1924
1925 case Primitive::kPrimNot: {
1926 Register output = output_loc.AsRegister<Register>();
1927 if (kEmitCompilerReadBarrier) {
1928 if (kUseBakerReadBarrier) {
Sang, Chunlei0fcd2b82016-04-05 17:12:59 +08001929 Address src(base, offset, ScaleFactor::TIMES_1, 0);
1930 codegen->GenerateReferenceLoadWithBakerReadBarrier(
Vladimir Marko953437b2016-08-24 08:30:46 +00001931 invoke, output_loc, base, src, /* needs_null_check */ false);
Roland Levillain7c1559a2015-12-15 10:55:36 +00001932 } else {
1933 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1934 codegen->GenerateReadBarrierSlow(
1935 invoke, output_loc, output_loc, base_loc, 0U, offset_loc);
1936 }
1937 } else {
1938 __ movl(output, Address(base, offset, ScaleFactor::TIMES_1, 0));
1939 __ MaybeUnpoisonHeapReference(output);
Roland Levillain4d027112015-07-01 15:41:14 +01001940 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001941 break;
Roland Levillain4d027112015-07-01 15:41:14 +01001942 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001943
1944 case Primitive::kPrimLong: {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001945 Register output_lo = output_loc.AsRegisterPairLow<Register>();
1946 Register output_hi = output_loc.AsRegisterPairHigh<Register>();
Mark Mendell09ed1a32015-03-25 08:30:06 -04001947 if (is_volatile) {
1948 // Need to use a XMM to read atomically.
1949 XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1950 __ movsd(temp, Address(base, offset, ScaleFactor::TIMES_1, 0));
1951 __ movd(output_lo, temp);
1952 __ psrlq(temp, Immediate(32));
1953 __ movd(output_hi, temp);
1954 } else {
1955 __ movl(output_lo, Address(base, offset, ScaleFactor::TIMES_1, 0));
1956 __ movl(output_hi, Address(base, offset, ScaleFactor::TIMES_1, 4));
1957 }
1958 }
1959 break;
1960
1961 default:
1962 LOG(FATAL) << "Unsupported op size " << type;
1963 UNREACHABLE();
1964 }
1965}
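// Sketch of the volatile 64-bit load above (illustrative only): a single 8-byte SSE load
// is used so the two halves cannot be torn, then the halves are extracted into the output
// register pair:
//
//   movsd xmm_temp, [base + offset]   ; one 64-bit memory access
//   movd  out_lo, xmm_temp            ; low 32 bits
//   psrlq xmm_temp, 32
//   movd  out_hi, xmm_temp            ; high 32 bits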
1966
Roland Levillain7c1559a2015-12-15 10:55:36 +00001967static void CreateIntIntIntToIntLocations(ArenaAllocator* arena,
1968 HInvoke* invoke,
1969 Primitive::Type type,
1970 bool is_volatile) {
Roland Levillain0d5a2812015-11-13 10:07:31 +00001971 bool can_call = kEmitCompilerReadBarrier &&
1972 (invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObject ||
1973 invoke->GetIntrinsic() == Intrinsics::kUnsafeGetObjectVolatile);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001974 LocationSummary* locations = new (arena) LocationSummary(invoke,
Roland Levillain0d5a2812015-11-13 10:07:31 +00001975 can_call ?
1976 LocationSummary::kCallOnSlowPath :
1977 LocationSummary::kNoCall,
Mark Mendell09ed1a32015-03-25 08:30:06 -04001978 kIntrinsified);
Vladimir Marko70e97462016-08-09 11:04:26 +01001979 if (can_call && kUseBakerReadBarrier) {
Vladimir Marko804b03f2016-09-14 16:26:36 +01001980 locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
Vladimir Marko70e97462016-08-09 11:04:26 +01001981 }
Mark Mendell09ed1a32015-03-25 08:30:06 -04001982 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
1983 locations->SetInAt(1, Location::RequiresRegister());
1984 locations->SetInAt(2, Location::RequiresRegister());
Roland Levillain7c1559a2015-12-15 10:55:36 +00001985 if (type == Primitive::kPrimLong) {
Mark Mendell09ed1a32015-03-25 08:30:06 -04001986 if (is_volatile) {
1987 // Need to use XMM to read volatile.
1988 locations->AddTemp(Location::RequiresFpuRegister());
Roland Levillain3d312422016-06-23 13:53:42 +01001989 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001990 } else {
1991 locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
1992 }
1993 } else {
Roland Levillain3d312422016-06-23 13:53:42 +01001994 locations->SetOut(Location::RequiresRegister(),
1995 can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap);
Mark Mendell09ed1a32015-03-25 08:30:06 -04001996 }
1997}
1998
1999void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002000 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002001}
2002void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002003 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimInt, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002004}
2005void IntrinsicLocationsBuilderX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002006 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002007}
2008void IntrinsicLocationsBuilderX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002009 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimLong, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002010}
2011void IntrinsicLocationsBuilderX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002012 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002013}
2014void IntrinsicLocationsBuilderX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillain7c1559a2015-12-15 10:55:36 +00002015 CreateIntIntIntToIntLocations(arena_, invoke, Primitive::kPrimNot, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002016}
2017
2018
2019void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002020 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002021}
2022void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002023 GenUnsafeGet(invoke, Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002024}
2025void IntrinsicCodeGeneratorX86::VisitUnsafeGetLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002026 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002027}
2028void IntrinsicCodeGeneratorX86::VisitUnsafeGetLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002029 GenUnsafeGet(invoke, Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002030}
2031void IntrinsicCodeGeneratorX86::VisitUnsafeGetObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002032 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002033}
2034void IntrinsicCodeGeneratorX86::VisitUnsafeGetObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002035 GenUnsafeGet(invoke, Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002036}
2037
2038
2039static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* arena,
2040 Primitive::Type type,
2041 HInvoke* invoke,
2042 bool is_volatile) {
2043 LocationSummary* locations = new (arena) LocationSummary(invoke,
2044 LocationSummary::kNoCall,
2045 kIntrinsified);
2046 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2047 locations->SetInAt(1, Location::RequiresRegister());
2048 locations->SetInAt(2, Location::RequiresRegister());
2049 locations->SetInAt(3, Location::RequiresRegister());
2050 if (type == Primitive::kPrimNot) {
2051 // Need temp registers for card-marking.
Roland Levillain4d027112015-07-01 15:41:14 +01002052 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell09ed1a32015-03-25 08:30:06 -04002053 // Ensure the value is in a byte register.
2054 locations->AddTemp(Location::RegisterLocation(ECX));
2055 } else if (type == Primitive::kPrimLong && is_volatile) {
2056 locations->AddTemp(Location::RequiresFpuRegister());
2057 locations->AddTemp(Location::RequiresFpuRegister());
2058 }
2059}
2060
2061void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002062 CreateIntIntIntIntToVoidPlusTempsLocations(
2063 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002064}
2065void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002066 CreateIntIntIntIntToVoidPlusTempsLocations(
2067 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002068}
2069void IntrinsicLocationsBuilderX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002070 CreateIntIntIntIntToVoidPlusTempsLocations(
2071 arena_, Primitive::kPrimInt, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002072}
2073void IntrinsicLocationsBuilderX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002074 CreateIntIntIntIntToVoidPlusTempsLocations(
2075 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002076}
2077void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002078 CreateIntIntIntIntToVoidPlusTempsLocations(
2079 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002080}
2081void IntrinsicLocationsBuilderX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002082 CreateIntIntIntIntToVoidPlusTempsLocations(
2083 arena_, Primitive::kPrimNot, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002084}
2085void IntrinsicLocationsBuilderX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002086 CreateIntIntIntIntToVoidPlusTempsLocations(
2087 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002088}
2089void IntrinsicLocationsBuilderX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002090 CreateIntIntIntIntToVoidPlusTempsLocations(
2091 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ false);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002092}
2093void IntrinsicLocationsBuilderX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002094 CreateIntIntIntIntToVoidPlusTempsLocations(
2095 arena_, Primitive::kPrimLong, invoke, /* is_volatile */ true);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002096}
2097
2098// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
2099// memory model.
2100static void GenUnsafePut(LocationSummary* locations,
2101 Primitive::Type type,
2102 bool is_volatile,
2103 CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002104 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell09ed1a32015-03-25 08:30:06 -04002105 Register base = locations->InAt(1).AsRegister<Register>();
2106 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2107 Location value_loc = locations->InAt(3);
2108
2109 if (type == Primitive::kPrimLong) {
2110 Register value_lo = value_loc.AsRegisterPairLow<Register>();
2111 Register value_hi = value_loc.AsRegisterPairHigh<Register>();
2112 if (is_volatile) {
2113 XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
2114 XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
2115 __ movd(temp1, value_lo);
2116 __ movd(temp2, value_hi);
2117 __ punpckldq(temp1, temp2);
2118 __ movsd(Address(base, offset, ScaleFactor::TIMES_1, 0), temp1);
2119 } else {
2120 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_lo);
2121 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 4), value_hi);
2122 }
Roland Levillain4d027112015-07-01 15:41:14 +01002123 } else if (kPoisonHeapReferences && type == Primitive::kPrimNot) {
2124 Register temp = locations->GetTemp(0).AsRegister<Register>();
2125 __ movl(temp, value_loc.AsRegister<Register>());
2126 __ PoisonHeapReference(temp);
2127 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), temp);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002128 } else {
2129 __ movl(Address(base, offset, ScaleFactor::TIMES_1, 0), value_loc.AsRegister<Register>());
2130 }
2131
2132 if (is_volatile) {
Mark P Mendell17077d82015-12-16 19:15:59 +00002133 codegen->MemoryFence();
Mark Mendell09ed1a32015-03-25 08:30:06 -04002134 }
2135
2136 if (type == Primitive::kPrimNot) {
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002137 bool value_can_be_null = true; // TODO: Worth finding out this information?
Mark Mendell09ed1a32015-03-25 08:30:06 -04002138 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2139 locations->GetTemp(1).AsRegister<Register>(),
2140 base,
Nicolas Geoffray07276db2015-05-18 14:22:09 +01002141 value_loc.AsRegister<Register>(),
2142 value_can_be_null);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002143 }
2144}
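// Sketch of the volatile 64-bit store above (illustrative only): the value is first
// packed into one XMM register so that a single 8-byte store hits memory, and volatile
// stores are then ordered by the codegen->MemoryFence() call:
//
//   movd      xmm1, value_lo
//   movd      xmm2, value_hi
//   punpckldq xmm1, xmm2              ; xmm1[63:0] = value_hi:value_lo
//   movsd     [base + offset], xmm1   ; one 64-bit memory access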
2145
2146void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002147 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002148}
2149void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002150 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002151}
2152void IntrinsicCodeGeneratorX86::VisitUnsafePutVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002153 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimInt, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002154}
2155void IntrinsicCodeGeneratorX86::VisitUnsafePutObject(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002156 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002157}
2158void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002159 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002160}
2161void IntrinsicCodeGeneratorX86::VisitUnsafePutObjectVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002162 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimNot, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002163}
2164void IntrinsicCodeGeneratorX86::VisitUnsafePutLong(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002165 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002166}
2167void IntrinsicCodeGeneratorX86::VisitUnsafePutLongOrdered(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002168 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ false, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002169}
2170void IntrinsicCodeGeneratorX86::VisitUnsafePutLongVolatile(HInvoke* invoke) {
Roland Levillainbf84a3d2015-12-04 14:33:02 +00002171 GenUnsafePut(invoke->GetLocations(), Primitive::kPrimLong, /* is_volatile */ true, codegen_);
Mark Mendell09ed1a32015-03-25 08:30:06 -04002172}
2173
Mark Mendell58d25fd2015-04-03 14:52:31 -04002174static void CreateIntIntIntIntIntToInt(ArenaAllocator* arena, Primitive::Type type,
2175 HInvoke* invoke) {
2176 LocationSummary* locations = new (arena) LocationSummary(invoke,
2177 LocationSummary::kNoCall,
2178 kIntrinsified);
2179 locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
2180 locations->SetInAt(1, Location::RequiresRegister());
2181 // Offset is a long, but in 32 bit mode, we only need the low word.
2182 // Can we update the invoke here to remove a TypeConvert to Long?
2183 locations->SetInAt(2, Location::RequiresRegister());
2184 // Expected value must be in EAX or EDX:EAX.
2185 // For long, new value must be in ECX:EBX.
2186 if (type == Primitive::kPrimLong) {
2187 locations->SetInAt(3, Location::RegisterPairLocation(EAX, EDX));
2188 locations->SetInAt(4, Location::RegisterPairLocation(EBX, ECX));
2189 } else {
2190 locations->SetInAt(3, Location::RegisterLocation(EAX));
2191 locations->SetInAt(4, Location::RequiresRegister());
2192 }
2193
2194 // Force a byte register for the output.
2195 locations->SetOut(Location::RegisterLocation(EAX));
2196 if (type == Primitive::kPrimNot) {
2197 // Need temp registers for card-marking.
Roland Levillainb488b782015-10-22 11:38:49 +01002198 locations->AddTemp(Location::RequiresRegister()); // Possibly used for reference poisoning too.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002199 // Need a byte register for marking.
2200 locations->AddTemp(Location::RegisterLocation(ECX));
2201 }
2202}
2203
2204void IntrinsicLocationsBuilderX86::VisitUnsafeCASInt(HInvoke* invoke) {
2205 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimInt, invoke);
2206}
2207
2208void IntrinsicLocationsBuilderX86::VisitUnsafeCASLong(HInvoke* invoke) {
2209 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimLong, invoke);
2210}
2211
2212void IntrinsicLocationsBuilderX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain391b8662015-12-18 11:43:38 +00002213 // The UnsafeCASObject intrinsic is missing a read barrier, and
2214 // therefore sometimes does not work as expected (b/25883050).
2215 // Turn it off temporarily as a quick fix, until the read barrier is
Roland Levillain3d312422016-06-23 13:53:42 +01002216 // implemented (see TODO in GenCAS).
Roland Levillain391b8662015-12-18 11:43:38 +00002217 //
Roland Levillain3d312422016-06-23 13:53:42 +01002218 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
Roland Levillain391b8662015-12-18 11:43:38 +00002219 // this intrinsic.
2220 if (kEmitCompilerReadBarrier) {
2221 return;
2222 }
2223
Mark Mendell58d25fd2015-04-03 14:52:31 -04002224 CreateIntIntIntIntIntToInt(arena_, Primitive::kPrimNot, invoke);
2225}
2226
2227static void GenCAS(Primitive::Type type, HInvoke* invoke, CodeGeneratorX86* codegen) {
Roland Levillainb488b782015-10-22 11:38:49 +01002228 X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
Mark Mendell58d25fd2015-04-03 14:52:31 -04002229 LocationSummary* locations = invoke->GetLocations();
2230
2231 Register base = locations->InAt(1).AsRegister<Register>();
2232 Register offset = locations->InAt(2).AsRegisterPairLow<Register>();
2233 Location out = locations->Out();
2234 DCHECK_EQ(out.AsRegister<Register>(), EAX);
2235
Roland Levillainb488b782015-10-22 11:38:49 +01002236 if (type == Primitive::kPrimNot) {
Roland Levillain4d027112015-07-01 15:41:14 +01002237 Register expected = locations->InAt(3).AsRegister<Register>();
Roland Levillainb488b782015-10-22 11:38:49 +01002238 // Ensure `expected` is in EAX (required by the CMPXCHG instruction).
Roland Levillain4d027112015-07-01 15:41:14 +01002239 DCHECK_EQ(expected, EAX);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002240 Register value = locations->InAt(4).AsRegister<Register>();
Roland Levillain4d027112015-07-01 15:41:14 +01002241
Roland Levillainb488b782015-10-22 11:38:49 +01002242 // Mark card for object assuming new value is stored.
2243 bool value_can_be_null = true; // TODO: Worth finding out this information?
2244 codegen->MarkGCCard(locations->GetTemp(0).AsRegister<Register>(),
2245 locations->GetTemp(1).AsRegister<Register>(),
2246 base,
2247 value,
2248 value_can_be_null);
2249
2250 bool base_equals_value = (base == value);
2251 if (kPoisonHeapReferences) {
2252 if (base_equals_value) {
2253 // If `base` and `value` are the same register location, move
2254 // `value` to a temporary register. This way, poisoning
2255 // `value` won't invalidate `base`.
2256 value = locations->GetTemp(0).AsRegister<Register>();
2257 __ movl(value, base);
Roland Levillain4d027112015-07-01 15:41:14 +01002258 }
Roland Levillainb488b782015-10-22 11:38:49 +01002259
2260 // Check that the register allocator did not assign the location
2261 // of `expected` (EAX) to `value` nor to `base`, so that heap
2262 // poisoning (when enabled) works as intended below.
2263 // - If `value` were equal to `expected`, both references would
2264 // be poisoned twice, meaning they would not be poisoned at
2265 // all, as heap poisoning uses address negation.
2266 // - If `base` were equal to `expected`, poisoning `expected`
2267 // would invalidate `base`.
2268 DCHECK_NE(value, expected);
2269 DCHECK_NE(base, expected);
2270
2271 __ PoisonHeapReference(expected);
2272 __ PoisonHeapReference(value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002273 }
2274
Roland Levillain391b8662015-12-18 11:43:38 +00002275 // TODO: Add a read barrier for the reference stored in the object
2276 // before attempting the CAS, similar to the one in the
2277 // art::Unsafe_compareAndSwapObject JNI implementation.
2278 //
2279 // Note that this code is not (yet) used when read barriers are
2280 // enabled (see IntrinsicLocationsBuilderX86::VisitUnsafeCASObject).
2281 DCHECK(!kEmitCompilerReadBarrier);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002282 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0), value);
Mark Mendell58d25fd2015-04-03 14:52:31 -04002283
Roland Levillain0d5a2812015-11-13 10:07:31 +00002284 // LOCK CMPXCHG has full barrier semantics, and we don't need
Roland Levillainb488b782015-10-22 11:38:49 +01002285 // scheduling barriers at this time.
Mark Mendell58d25fd2015-04-03 14:52:31 -04002286
Roland Levillainb488b782015-10-22 11:38:49 +01002287 // Convert ZF into the boolean result.
2288 __ setb(kZero, out.AsRegister<Register>());
2289 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002290
Roland Levillain391b8662015-12-18 11:43:38 +00002291 // If heap poisoning is enabled, we need to unpoison the values
2292 // that were poisoned earlier.
Roland Levillainb488b782015-10-22 11:38:49 +01002293 if (kPoisonHeapReferences) {
2294 if (base_equals_value) {
2295 // `value` has been moved to a temporary register, no need to
2296 // unpoison it.
2297 } else {
2298 // Ensure `value` is different from `out`, so that unpoisoning
2299 // the former does not invalidate the latter.
2300 DCHECK_NE(value, out.AsRegister<Register>());
2301 __ UnpoisonHeapReference(value);
2302 }
2303 // Do not unpoison the reference contained in register
2304 // `expected`, as it is the same as register `out` (EAX).
2305 }
2306 } else {
2307 if (type == Primitive::kPrimInt) {
2308 // Ensure the expected value is in EAX (required by the CMPXCHG
2309 // instruction).
2310 DCHECK_EQ(locations->InAt(3).AsRegister<Register>(), EAX);
2311 __ LockCmpxchgl(Address(base, offset, TIMES_1, 0),
2312 locations->InAt(4).AsRegister<Register>());
2313 } else if (type == Primitive::kPrimLong) {
2314 // Ensure the expected value is in EAX:EDX and that the new
2315 // value is in EBX:ECX (required by the CMPXCHG8B instruction).
2316 DCHECK_EQ(locations->InAt(3).AsRegisterPairLow<Register>(), EAX);
2317 DCHECK_EQ(locations->InAt(3).AsRegisterPairHigh<Register>(), EDX);
2318 DCHECK_EQ(locations->InAt(4).AsRegisterPairLow<Register>(), EBX);
2319 DCHECK_EQ(locations->InAt(4).AsRegisterPairHigh<Register>(), ECX);
2320 __ LockCmpxchg8b(Address(base, offset, TIMES_1, 0));
2321 } else {
2322 LOG(FATAL) << "Unexpected CAS type " << type;
2323 }
2324
Roland Levillain0d5a2812015-11-13 10:07:31 +00002325 // LOCK CMPXCHG/LOCK CMPXCHG8B have full barrier semantics, and we
2326 // don't need scheduling barriers at this time.
Roland Levillainb488b782015-10-22 11:38:49 +01002327
2328 // Convert ZF into the boolean result.
2329 __ setb(kZero, out.AsRegister<Register>());
2330 __ movzxb(out.AsRegister<Register>(), out.AsRegister<ByteRegister>());
Roland Levillain4d027112015-07-01 15:41:14 +01002331 }
Mark Mendell58d25fd2015-04-03 14:52:31 -04002332}
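// Illustrative summary of the CAS result handling above: LOCK CMPXCHG / LOCK CMPXCHG8B
// set ZF when the value in memory matched the expected value and the swap happened, so
// the boolean result is materialized as
//
//   setz  al            ; setb(kZero, out) in assembler terms
//   movzx eax, al       ; widen the flag byte to the full register
//
// which is roughly out = (old_value == expected) ? 1 : 0.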
2333
2334void IntrinsicCodeGeneratorX86::VisitUnsafeCASInt(HInvoke* invoke) {
2335 GenCAS(Primitive::kPrimInt, invoke, codegen_);
2336}
2337
2338void IntrinsicCodeGeneratorX86::VisitUnsafeCASLong(HInvoke* invoke) {
2339 GenCAS(Primitive::kPrimLong, invoke, codegen_);
2340}
2341
2342void IntrinsicCodeGeneratorX86::VisitUnsafeCASObject(HInvoke* invoke) {
Roland Levillain3d312422016-06-23 13:53:42 +01002343 // The UnsafeCASObject intrinsic is missing a read barrier, and
2344 // therefore sometimes does not work as expected (b/25883050).
2345 // Turn it off temporarily as a quick fix, until the read barrier is
2346 // implemented (see TODO in GenCAS).
2347 //
2348 // TODO(rpl): Implement read barrier support in GenCAS and re-enable
2349 // this intrinsic.
2350 DCHECK(!kEmitCompilerReadBarrier);
2351
Mark Mendell58d25fd2015-04-03 14:52:31 -04002352 GenCAS(Primitive::kPrimNot, invoke, codegen_);
2353}
2354
2355void IntrinsicLocationsBuilderX86::VisitIntegerReverse(HInvoke* invoke) {
2356 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2357 LocationSummary::kNoCall,
2358 kIntrinsified);
2359 locations->SetInAt(0, Location::RequiresRegister());
2360 locations->SetOut(Location::SameAsFirstInput());
2361 locations->AddTemp(Location::RequiresRegister());
2362}
2363
2364static void SwapBits(Register reg, Register temp, int32_t shift, int32_t mask,
2365 X86Assembler* assembler) {
2366 Immediate imm_shift(shift);
2367 Immediate imm_mask(mask);
2368 __ movl(temp, reg);
2369 __ shrl(reg, imm_shift);
2370 __ andl(temp, imm_mask);
2371 __ andl(reg, imm_mask);
2372 __ shll(temp, imm_shift);
2373 __ orl(reg, temp);
2374}
2375
2376void IntrinsicCodeGeneratorX86::VisitIntegerReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002377 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002378 LocationSummary* locations = invoke->GetLocations();
2379
2380 Register reg = locations->InAt(0).AsRegister<Register>();
2381 Register temp = locations->GetTemp(0).AsRegister<Register>();
2382
2383 /*
2384 * Use one bswap instruction to reverse byte order first and then use 3 rounds of
2385 * swapping bits to reverse bits in a number x. Using bswap to save instructions
2386 * compared to generic luni implementation which has 5 rounds of swapping bits.
2387 * x = bswap x
2388 * x = (x & 0x55555555) << 1 | (x >> 1) & 0x55555555;
2389 * x = (x & 0x33333333) << 2 | (x >> 2) & 0x33333333;
2390 * x = (x & 0x0F0F0F0F) << 4 | (x >> 4) & 0x0F0F0F0F;
2391 */
2392 __ bswapl(reg);
2393 SwapBits(reg, temp, 1, 0x55555555, assembler);
2394 SwapBits(reg, temp, 2, 0x33333333, assembler);
2395 SwapBits(reg, temp, 4, 0x0f0f0f0f, assembler);
2396}
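// Worked example of one SwapBits round (illustrative only): with shift = 1 and
// mask = 0x55555555 the round computes
//
//   reg = ((reg >> 1) & 0x55555555) | ((reg & 0x55555555) << 1);
//
// i.e. every pair of adjacent bits is exchanged; for instance 0b0110 becomes 0b1001.
// The rounds with shift = 2 and shift = 4 then swap bit pairs and nibbles the same way.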
2397
2398void IntrinsicLocationsBuilderX86::VisitLongReverse(HInvoke* invoke) {
2399 LocationSummary* locations = new (arena_) LocationSummary(invoke,
2400 LocationSummary::kNoCall,
2401 kIntrinsified);
2402 locations->SetInAt(0, Location::RequiresRegister());
2403 locations->SetOut(Location::SameAsFirstInput());
2404 locations->AddTemp(Location::RequiresRegister());
2405}
2406
2407void IntrinsicCodeGeneratorX86::VisitLongReverse(HInvoke* invoke) {
Aart Bika19616e2016-02-01 18:57:58 -08002408 X86Assembler* assembler = GetAssembler();
Mark Mendell58d25fd2015-04-03 14:52:31 -04002409 LocationSummary* locations = invoke->GetLocations();
2410
2411 Register reg_low = locations->InAt(0).AsRegisterPairLow<Register>();
2412 Register reg_high = locations->InAt(0).AsRegisterPairHigh<Register>();
2413 Register temp = locations->GetTemp(0).AsRegister<Register>();
2414
2415 // We want to swap high/low, then bswap each one, and then do the same
2416 // as a 32 bit reverse.
  // Exchange high and low.
  __ movl(temp, reg_low);
  __ movl(reg_low, reg_high);
  __ movl(reg_high, temp);

  // bit-reverse low
  __ bswapl(reg_low);
  SwapBits(reg_low, temp, 1, 0x55555555, assembler);
  SwapBits(reg_low, temp, 2, 0x33333333, assembler);
  SwapBits(reg_low, temp, 4, 0x0f0f0f0f, assembler);

  // bit-reverse high
  __ bswapl(reg_high);
  SwapBits(reg_high, temp, 1, 0x55555555, assembler);
  SwapBits(reg_high, temp, 2, 0x33333333, assembler);
  SwapBits(reg_high, temp, 4, 0x0f0f0f0f, assembler);
}

static void CreateBitCountLocations(
    ArenaAllocator* arena, CodeGeneratorX86* codegen, HInvoke* invoke, bool is_long) {
  if (!codegen->GetInstructionSetFeatures().HasPopCnt()) {
    // Do nothing if there is no popcnt support. This results in generating
    // a call for the intrinsic rather than direct code.
    return;
  }
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->AddTemp(Location::RequiresRegister());
  }
  locations->SetInAt(0, Location::Any());
  locations->SetOut(Location::RequiresRegister());
}

static void GenBitCount(X86Assembler* assembler,
                        CodeGeneratorX86* codegen,
                        HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    int32_t result = is_long
        ? POPCOUNT(static_cast<uint64_t>(value))
        : POPCOUNT(static_cast<uint32_t>(value));
    codegen->Load32BitValue(out, result);
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ popcntl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ popcntl(out, Address(ESP, src.GetStackIndex()));
    }
  } else {
    // The 64-bit case needs to worry about two parts.
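    // popcount(hi:lo) = popcount(hi) + popcount(lo); the sum is at most 64,
    // so it always fits in the single 32-bit output register.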
    Register temp = locations->GetTemp(0).AsRegister<Register>();
    if (src.IsRegisterPair()) {
      __ popcntl(temp, src.AsRegisterPairLow<Register>());
      __ popcntl(out, src.AsRegisterPairHigh<Register>());
    } else {
      DCHECK(src.IsDoubleStackSlot());
      __ popcntl(temp, Address(ESP, src.GetStackIndex()));
      __ popcntl(out, Address(ESP, src.GetHighStackIndex(kX86WordSize)));
    }
    __ addl(out, temp);
  }
}

void IntrinsicLocationsBuilderX86::VisitIntegerBitCount(HInvoke* invoke) {
  CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerBitCount(HInvoke* invoke) {
  GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongBitCount(HInvoke* invoke) {
  CreateBitCountLocations(arena_, codegen_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongBitCount(HInvoke* invoke) {
  GenBitCount(GetAssembler(), codegen_, invoke, /* is_long */ true);
}

static void CreateLeadingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenLeadingZeros(X86Assembler* assembler,
                            CodeGeneratorX86* codegen,
                            HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CLZ(static_cast<uint64_t>(value)) : CLZ(static_cast<uint32_t>(value));
    }
    codegen->Load32BitValue(out, value);
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsrl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsrl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSR sets ZF if the input was zero; in that case the output is undefined.
    NearLabel all_zeroes, done;
    __ j(kEqual, &all_zeroes);

    // Correct the result from BSR to get the final CLZ result.
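    // For a non-zero input, BSR returns the index of the most significant set bit;
    // CLZ is (31 - index), which equals (31 ^ index) since the index is in [0, 31].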
    __ xorl(out, Immediate(31));
    __ jmp(&done);

    // Fix the zero case with the expected result.
    __ Bind(&all_zeroes);
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel handle_low, done, all_zeroes;

  // Is the high word zero?
  __ testl(src_hi, src_hi);
  __ j(kEqual, &handle_low);

  // High word is not zero. We know that the BSR result is defined in this case.
  __ bsrl(out, src_hi);

  // Correct the result from BSR to get the final CLZ result.
  __ xorl(out, Immediate(31));
  __ jmp(&done);

  // High word was zero. We have to compute the low word count and add 32.
  __ Bind(&handle_low);
  __ bsrl(out, src_lo);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Use an XOR to both correct the result and add 32.
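  // (63 ^ index) == (31 - index) + 32 for an index in [0, 31].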
  __ xorl(out, Immediate(63));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfLeadingZeros(HInvoke* invoke) {
  GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  CreateLeadingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfLeadingZeros(HInvoke* invoke) {
  GenLeadingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
}

static void CreateTrailingZeroLocations(ArenaAllocator* arena, HInvoke* invoke, bool is_long) {
  LocationSummary* locations = new (arena) LocationSummary(invoke,
                                                           LocationSummary::kNoCall,
                                                           kIntrinsified);
  if (is_long) {
    locations->SetInAt(0, Location::RequiresRegister());
  } else {
    locations->SetInAt(0, Location::Any());
  }
  locations->SetOut(Location::RequiresRegister());
}

static void GenTrailingZeros(X86Assembler* assembler,
                             CodeGeneratorX86* codegen,
                             HInvoke* invoke, bool is_long) {
  LocationSummary* locations = invoke->GetLocations();
  Location src = locations->InAt(0);
  Register out = locations->Out().AsRegister<Register>();

  if (invoke->InputAt(0)->IsConstant()) {
    // Evaluate this at compile time.
    int64_t value = Int64FromConstant(invoke->InputAt(0)->AsConstant());
    if (value == 0) {
      value = is_long ? 64 : 32;
    } else {
      value = is_long ? CTZ(static_cast<uint64_t>(value)) : CTZ(static_cast<uint32_t>(value));
    }
    codegen->Load32BitValue(out, value);
    return;
  }

  // Handle the non-constant cases.
  if (!is_long) {
    if (src.IsRegister()) {
      __ bsfl(out, src.AsRegister<Register>());
    } else {
      DCHECK(src.IsStackSlot());
      __ bsfl(out, Address(ESP, src.GetStackIndex()));
    }

    // BSF sets ZF if the input was zero; in that case the output is undefined.
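    // For a non-zero input, BSF directly returns the number of trailing zeros
    // (the index of the least significant set bit), so no correction is needed.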
    NearLabel done;
    __ j(kNotEqual, &done);

    // Fix the zero case with the expected result.
    __ movl(out, Immediate(32));

    __ Bind(&done);
    return;
  }

  // The 64-bit case needs to worry about both parts of the register.
  DCHECK(src.IsRegisterPair());
  Register src_lo = src.AsRegisterPairLow<Register>();
  Register src_hi = src.AsRegisterPairHigh<Register>();
  NearLabel done, all_zeroes;

  // If the low word is zero, then ZF will be set. If not, we have the answer.
  __ bsfl(out, src_lo);
  __ j(kNotEqual, &done);

  // Low word was zero. We have to compute the high word count and add 32.
  __ bsfl(out, src_hi);
  __ j(kEqual, &all_zeroes);

  // We had a valid result. Add 32 to account for the low word being zero.
  __ addl(out, Immediate(32));
  __ jmp(&done);

  // All zero case.
  __ Bind(&all_zeroes);
  __ movl(out, Immediate(64));

  __ Bind(&done);
}

void IntrinsicLocationsBuilderX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ false);
}

void IntrinsicCodeGeneratorX86::VisitIntegerNumberOfTrailingZeros(HInvoke* invoke) {
  GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ false);
}

void IntrinsicLocationsBuilderX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  CreateTrailingZeroLocations(arena_, invoke, /* is_long */ true);
}

void IntrinsicCodeGeneratorX86::VisitLongNumberOfTrailingZeros(HInvoke* invoke) {
  GenTrailingZeros(GetAssembler(), codegen_, invoke, /* is_long */ true);
}

void IntrinsicLocationsBuilderX86::VisitReferenceGetReferent(HInvoke* invoke) {
  if (kEmitCompilerReadBarrier) {
    // Do not intrinsify this call with the read barrier configuration.
    return;
  }
  LocationSummary* locations = new (arena_) LocationSummary(invoke,
                                                            LocationSummary::kCallOnSlowPath,
                                                            kIntrinsified);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  locations->AddTemp(Location::RequiresRegister());
}

void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
  DCHECK(!kEmitCompilerReadBarrier);
  LocationSummary* locations = invoke->GetLocations();
  X86Assembler* assembler = GetAssembler();

  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();

  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(slow_path);

  // Load ArtMethod first.
  HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
  DCHECK(invoke_direct != nullptr);
  Location temp_loc = codegen_->GenerateCalleeMethodStaticOrDirectCall(
      invoke_direct, locations->GetTemp(0));
  DCHECK(temp_loc.Equals(locations->GetTemp(0)));
  Register temp = temp_loc.AsRegister<Register>();

  // Now get declaring class.
  __ movl(temp, Address(temp, ArtMethod::DeclaringClassOffset().Int32Value()));

  uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
  uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
  DCHECK_NE(slow_path_flag_offset, 0u);
  DCHECK_NE(disable_flag_offset, 0u);
  DCHECK_NE(slow_path_flag_offset, disable_flag_offset);

  // Check the static flags that prevent us from using the intrinsic.
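  // When the two byte-sized flags are adjacent, a single 16-bit compare against zero
  // checks both of them at once.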
  if (slow_path_flag_offset == disable_flag_offset + 1) {
    __ cmpw(Address(temp, disable_flag_offset), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
  } else {
    __ cmpb(Address(temp, disable_flag_offset), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ cmpb(Address(temp, slow_path_flag_offset), Immediate(0));
    __ j(kNotEqual, slow_path->GetEntryLabel());
  }

  // Fast path.
  __ movl(out, Address(obj, mirror::Reference::ReferentOffset().Int32Value()));
  codegen_->MaybeRecordImplicitNullCheck(invoke);
  __ MaybeUnpoisonHeapReference(out);
  __ Bind(slow_path->GetExitLabel());
}

static bool IsSameInput(HInstruction* instruction, size_t input0, size_t input1) {
  return instruction->InputAt(input0) == instruction->InputAt(input1);
}

void IntrinsicLocationsBuilderX86::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barriers.
  if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
    return;
  }

  CodeGenerator::CreateSystemArrayCopyLocationSummary(invoke);
  if (invoke->GetLocations() != nullptr) {
    // Need a byte register for marking.
    invoke->GetLocations()->SetTempAt(1, Location::RegisterLocation(ECX));

    static constexpr size_t kSrc = 0;
    static constexpr size_t kSrcPos = 1;
    static constexpr size_t kDest = 2;
    static constexpr size_t kDestPos = 3;
    static constexpr size_t kLength = 4;

    if (!invoke->InputAt(kSrcPos)->IsIntConstant() &&
        !invoke->InputAt(kDestPos)->IsIntConstant() &&
        !invoke->InputAt(kLength)->IsIntConstant()) {
      if (!IsSameInput(invoke, kSrcPos, kDestPos) &&
          !IsSameInput(invoke, kSrcPos, kLength) &&
          !IsSameInput(invoke, kDestPos, kLength) &&
          !IsSameInput(invoke, kSrc, kDest)) {
        // Not enough registers, make the length also take a stack slot.
        invoke->GetLocations()->SetInAt(kLength, Location::Any());
      }
    }
  }
}

void IntrinsicCodeGeneratorX86::VisitSystemArrayCopy(HInvoke* invoke) {
  // The only read barrier implementation supporting the
  // SystemArrayCopy intrinsic is the Baker-style read barriers.
  DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);

  X86Assembler* assembler = GetAssembler();
  LocationSummary* locations = invoke->GetLocations();

  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  uint32_t super_offset = mirror::Class::SuperClassOffset().Int32Value();
  uint32_t component_offset = mirror::Class::ComponentTypeOffset().Int32Value();
  uint32_t primitive_offset = mirror::Class::PrimitiveTypeOffset().Int32Value();
  uint32_t monitor_offset = mirror::Object::MonitorOffset().Int32Value();

  Register src = locations->InAt(0).AsRegister<Register>();
  Location src_pos = locations->InAt(1);
  Register dest = locations->InAt(2).AsRegister<Register>();
  Location dest_pos = locations->InAt(3);
  Location length_arg = locations->InAt(4);
  Location length = length_arg;
  Location temp1_loc = locations->GetTemp(0);
  Register temp1 = temp1_loc.AsRegister<Register>();
  Location temp2_loc = locations->GetTemp(1);
  Register temp2 = temp2_loc.AsRegister<Register>();

  SlowPathCode* intrinsic_slow_path = new (GetAllocator()) IntrinsicSlowPathX86(invoke);
  codegen_->AddSlowPath(intrinsic_slow_path);

  NearLabel conditions_on_positions_validated;
  SystemArrayCopyOptimizations optimizations(invoke);

  // If source and destination are the same, we go to the slow path if the
  // destination starts after the source (dest_pos > src_pos), since the forward
  // copy loop below would not handle that overlap correctly.
  if (src_pos.IsConstant()) {
    int32_t src_pos_constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      if (optimizations.GetDestinationIsSource()) {
        // Checked when building locations.
        DCHECK_GE(src_pos_constant, dest_pos_constant);
      } else if (src_pos_constant < dest_pos_constant) {
        __ cmpl(src, dest);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      }
    } else {
      if (!optimizations.GetDestinationIsSource()) {
        __ cmpl(src, dest);
        __ j(kNotEqual, &conditions_on_positions_validated);
      }
      __ cmpl(dest_pos.AsRegister<Register>(), Immediate(src_pos_constant));
      __ j(kGreater, intrinsic_slow_path->GetEntryLabel());
    }
  } else {
    if (!optimizations.GetDestinationIsSource()) {
      __ cmpl(src, dest);
      __ j(kNotEqual, &conditions_on_positions_validated);
    }
    if (dest_pos.IsConstant()) {
      int32_t dest_pos_constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ cmpl(src_pos.AsRegister<Register>(), Immediate(dest_pos_constant));
      __ j(kLess, intrinsic_slow_path->GetEntryLabel());
    } else {
      __ cmpl(src_pos.AsRegister<Register>(), dest_pos.AsRegister<Register>());
      __ j(kLess, intrinsic_slow_path->GetEntryLabel());
    }
  }

  __ Bind(&conditions_on_positions_validated);

  if (!optimizations.GetSourceIsNotNull()) {
    // Bail out if the source is null.
    __ testl(src, src);
    __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
  }

  if (!optimizations.GetDestinationIsNotNull() && !optimizations.GetDestinationIsSource()) {
    // Bail out if the destination is null.
    __ testl(dest, dest);
    __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
  }

  Location temp3_loc = locations->GetTemp(2);
  Register temp3 = temp3_loc.AsRegister<Register>();
  if (length.IsStackSlot()) {
    __ movl(temp3, Address(ESP, length.GetStackIndex()));
    length = Location::RegisterLocation(temp3);
  }

  // If the length is negative, bail out.
  // We have already checked in the LocationsBuilder for the constant case.
  if (!length.IsConstant() &&
      !optimizations.GetCountIsSourceLength() &&
      !optimizations.GetCountIsDestinationLength()) {
    __ testl(length.AsRegister<Register>(), length.AsRegister<Register>());
    __ j(kLess, intrinsic_slow_path->GetEntryLabel());
  }

  // Validity checks: source.
  CheckPosition(assembler,
                src_pos,
                src,
                length,
                intrinsic_slow_path,
                temp1,
                optimizations.GetCountIsSourceLength());

  // Validity checks: dest.
  CheckPosition(assembler,
                dest_pos,
                dest,
                length,
                intrinsic_slow_path,
                temp1,
                optimizations.GetCountIsDestinationLength());

  if (!optimizations.GetDoesNotNeedTypeCheck()) {
    // Check whether all elements of the source array are assignable to the component
    // type of the destination array. We do two checks: the classes are the same,
    // or the destination is Object[]. If none of these checks succeed, we go to the
    // slow path.

    if (!optimizations.GetSourceIsNonPrimitiveArray()) {
      if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
        // /* HeapReference<Class> */ temp1 = src->klass_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
        // Bail out if the source is not a non primitive array.
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
        __ testl(temp1, temp1);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp1` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
      } else {
        // /* HeapReference<Class> */ temp1 = src->klass_
        __ movl(temp1, Address(src, class_offset));
        __ MaybeUnpoisonHeapReference(temp1);
        // Bail out if the source is not a non primitive array.
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ movl(temp1, Address(temp1, component_offset));
        __ testl(temp1, temp1);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        __ MaybeUnpoisonHeapReference(temp1);
      }
      __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
      __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
    }

    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      if (length.Equals(Location::RegisterLocation(temp3))) {
        // When Baker read barriers are enabled, register `temp3`,
        // which in the present case contains the `length` parameter,
        // will be overwritten below. Make the `length` location
        // reference the original stack location; it will be moved
        // back to `temp3` later if necessary.
        DCHECK(length_arg.IsStackSlot());
        length = length_arg;
      }

      // /* HeapReference<Class> */ temp1 = dest->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, dest, class_offset, /* needs_null_check */ false);

      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        // Bail out if the destination is not a non primitive array.
        //
        // Register `temp1` is not trashed by the read barrier emitted
        // by GenerateFieldLoadWithBakerReadBarrier below, as that
        // method produces a call to a ReadBarrierMarkRegX entry point,
        // which saves all potentially live registers, including
        // temporaries such as `temp1`.
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp2_loc, temp1, component_offset, /* needs_null_check */ false);
        __ testl(temp2, temp2);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        // If heap poisoning is enabled, `temp2` has been unpoisoned
        // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
        __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
      }

      // For the same reason given earlier, `temp1` is not trashed by the
      // read barrier emitted by GenerateFieldLoadWithBakerReadBarrier below.
      // /* HeapReference<Class> */ temp2 = src->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp2_loc, src, class_offset, /* needs_null_check */ false);
      // Note: if heap poisoning is on, we are comparing two unpoisoned references here.
      __ cmpl(temp1, temp2);

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        NearLabel do_copy;
        __ j(kEqual, &do_copy);
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        codegen_->GenerateFieldLoadWithBakerReadBarrier(
            invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
        // We do not need to emit a read barrier for the following
        // heap reference load, as `temp1` is only used in a
        // comparison with null below, and this reference is not
        // kept afterwards.
        __ cmpl(Address(temp1, super_offset), Immediate(0));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
      }
    } else {
      // Non read barrier code.

      // /* HeapReference<Class> */ temp1 = dest->klass_
      __ movl(temp1, Address(dest, class_offset));
      if (!optimizations.GetDestinationIsNonPrimitiveArray()) {
        __ MaybeUnpoisonHeapReference(temp1);
        // Bail out if the destination is not a non primitive array.
        // /* HeapReference<Class> */ temp2 = temp1->component_type_
        __ movl(temp2, Address(temp1, component_offset));
        __ testl(temp2, temp2);
        __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
        __ MaybeUnpoisonHeapReference(temp2);
        __ cmpw(Address(temp2, primitive_offset), Immediate(Primitive::kPrimNot));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
        // Re-poison the heap reference to make the compare instruction below
        // compare two poisoned references.
        __ PoisonHeapReference(temp1);
      }

      // Note: if heap poisoning is on, we are comparing two poisoned references here.
      __ cmpl(temp1, Address(src, class_offset));

      if (optimizations.GetDestinationIsTypedObjectArray()) {
        NearLabel do_copy;
        __ j(kEqual, &do_copy);
        __ MaybeUnpoisonHeapReference(temp1);
        // /* HeapReference<Class> */ temp1 = temp1->component_type_
        __ movl(temp1, Address(temp1, component_offset));
        __ MaybeUnpoisonHeapReference(temp1);
        __ cmpl(Address(temp1, super_offset), Immediate(0));
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
        __ Bind(&do_copy);
      } else {
        __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
      }
    }
  } else if (!optimizations.GetSourceIsNonPrimitiveArray()) {
    DCHECK(optimizations.GetDestinationIsNonPrimitiveArray());
    // Bail out if the source is not a non primitive array.
    if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
      // /* HeapReference<Class> */ temp1 = src->klass_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, src, class_offset, /* needs_null_check */ false);
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      codegen_->GenerateFieldLoadWithBakerReadBarrier(
          invoke, temp1_loc, temp1, component_offset, /* needs_null_check */ false);
      __ testl(temp1, temp1);
      __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      // If heap poisoning is enabled, `temp1` has been unpoisoned
      // by the previous call to GenerateFieldLoadWithBakerReadBarrier.
    } else {
      // /* HeapReference<Class> */ temp1 = src->klass_
      __ movl(temp1, Address(src, class_offset));
      __ MaybeUnpoisonHeapReference(temp1);
      // /* HeapReference<Class> */ temp1 = temp1->component_type_
      __ movl(temp1, Address(temp1, component_offset));
      __ testl(temp1, temp1);
      __ j(kEqual, intrinsic_slow_path->GetEntryLabel());
      __ MaybeUnpoisonHeapReference(temp1);
    }
    __ cmpw(Address(temp1, primitive_offset), Immediate(Primitive::kPrimNot));
    __ j(kNotEqual, intrinsic_slow_path->GetEntryLabel());
  }

  // Compute the base source address in `temp1`.
  int32_t element_size = Primitive::ComponentSize(Primitive::kPrimNot);
  DCHECK_EQ(element_size, 4);
  uint32_t offset = mirror::Array::DataOffset(element_size).Uint32Value();
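  // The first element to copy lives at src + offset + element_size * src_pos
  // (and similarly for the destination later); leal computes that address in a
  // single instruction.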
  if (src_pos.IsConstant()) {
    int32_t constant = src_pos.GetConstant()->AsIntConstant()->GetValue();
    __ leal(temp1, Address(src, element_size * constant + offset));
  } else {
    __ leal(temp1, Address(src, src_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
  }

  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
    // If it is needed (in the case of the fast-path loop), the base
    // destination address is computed later, as `temp2` is used for
    // intermediate computations.

    // Compute the end source address in `temp3`.
    if (length.IsConstant()) {
      int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
      __ leal(temp3, Address(temp1, element_size * constant));
    } else {
      if (length.IsStackSlot()) {
        // Location `length` is again pointing at a stack slot, as
        // register `temp3` (which was containing the length parameter
        // earlier) has been overwritten; restore it now.
        DCHECK(length.Equals(length_arg));
        __ movl(temp3, Address(ESP, length.GetStackIndex()));
        length = Location::RegisterLocation(temp3);
      }
      __ leal(temp3, Address(temp1, length.AsRegister<Register>(), ScaleFactor::TIMES_4, 0));
    }

    // SystemArrayCopy implementation for Baker read barriers (see
    // also CodeGeneratorX86::GenerateReferenceLoadWithBakerReadBarrier):
    //
    // if (src_ptr != end_ptr) {
    //   uint32_t rb_state = Lockword(src->monitor_).ReadBarrierState();
    //   lfence;  // Load fence or artificial data dependency to prevent load-load reordering
    //   bool is_gray = (rb_state == ReadBarrier::gray_ptr_);
    //   if (is_gray) {
    //     // Slow-path copy.
    //     for (size_t i = 0; i != length; ++i) {
    //       dest_array[dest_pos + i] =
    //           MaybePoison(ReadBarrier::Mark(MaybeUnpoison(src_array[src_pos + i])));
    //     }
    //   } else {
    //     // Fast-path copy.
    //     do {
    //       *dest_ptr++ = *src_ptr++;
    //     } while (src_ptr != end_ptr)
    //   }
    // }

    NearLabel loop, done;

    // Don't enter copy loop if `length == 0`.
    __ cmpl(temp1, temp3);
    __ j(kEqual, &done);

    // Given the numeric representation, it's enough to check the low bit of the rb_state.
    static_assert(ReadBarrier::white_ptr_ == 0, "Expecting white to have value 0");
    static_assert(ReadBarrier::gray_ptr_ == 1, "Expecting gray to have value 1");
    static_assert(ReadBarrier::black_ptr_ == 2, "Expecting black to have value 2");
    constexpr uint32_t gray_byte_position = LockWord::kReadBarrierStateShift / kBitsPerByte;
    constexpr uint32_t gray_bit_position = LockWord::kReadBarrierStateShift % kBitsPerByte;
    constexpr int32_t test_value = static_cast<int8_t>(1 << gray_bit_position);

    // if (rb_state == ReadBarrier::gray_ptr_)
    //   goto slow_path;
    // At this point, just do the "if" and make sure that flags are preserved until the branch.
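    // The testb only loads the lock word byte that holds the read barrier state bit;
    // a non-zero result (kNotZero below) means the object is gray.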
    __ testb(Address(src, monitor_offset + gray_byte_position), Immediate(test_value));

    // Load fence to prevent load-load reordering.
    // Note that this is a no-op, thanks to the x86 memory model.
    codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);

    // Slow path used to copy array when `src` is gray.
    SlowPathCode* read_barrier_slow_path =
        new (GetAllocator()) ReadBarrierSystemArrayCopySlowPathX86(invoke);
    codegen_->AddSlowPath(read_barrier_slow_path);

    // We have done the "if" of the gray bit check above, now branch based on the flags.
    __ j(kNotZero, read_barrier_slow_path->GetEntryLabel());

    // Fast-path copy.

    // Set the base destination address in `temp2`.
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ leal(temp2, Address(dest, element_size * constant + offset));
    } else {
      __ leal(temp2, Address(dest, dest_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
    }

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
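    // Each pushl/popl pair performs a memory-to-memory move of one 4-byte reference
    // through the stack, avoiding the need for another temporary register.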
    __ Bind(&loop);
    __ pushl(Address(temp1, 0));
    __ cfi().AdjustCFAOffset(4);
    __ popl(Address(temp2, 0));
    __ cfi().AdjustCFAOffset(-4);
    __ addl(temp1, Immediate(element_size));
    __ addl(temp2, Immediate(element_size));
    __ cmpl(temp1, temp3);
    __ j(kNotEqual, &loop);

    __ Bind(read_barrier_slow_path->GetExitLabel());
    __ Bind(&done);
  } else {
    // Non read barrier code.

    // Compute the base destination address in `temp2`.
    if (dest_pos.IsConstant()) {
      int32_t constant = dest_pos.GetConstant()->AsIntConstant()->GetValue();
      __ leal(temp2, Address(dest, element_size * constant + offset));
    } else {
      __ leal(temp2, Address(dest, dest_pos.AsRegister<Register>(), ScaleFactor::TIMES_4, offset));
    }

    // Compute the end source address in `temp3`.
    if (length.IsConstant()) {
      int32_t constant = length.GetConstant()->AsIntConstant()->GetValue();
      __ leal(temp3, Address(temp1, element_size * constant));
    } else {
      __ leal(temp3, Address(temp1, length.AsRegister<Register>(), ScaleFactor::TIMES_4, 0));
    }

    // Iterate over the arrays and do a raw copy of the objects. We don't need to
    // poison/unpoison.
    NearLabel loop, done;
    __ cmpl(temp1, temp3);
    __ j(kEqual, &done);
    __ Bind(&loop);
    __ pushl(Address(temp1, 0));
    __ cfi().AdjustCFAOffset(4);
    __ popl(Address(temp2, 0));
    __ cfi().AdjustCFAOffset(-4);
    __ addl(temp1, Immediate(element_size));
    __ addl(temp2, Immediate(element_size));
    __ cmpl(temp1, temp3);
    __ j(kNotEqual, &loop);
    __ Bind(&done);
  }

  // We only need one card marking on the destination array.
  codegen_->MarkGCCard(temp1,
                       temp2,
                       dest,
                       Register(kNoRegister),
                       /* value_can_be_null */ false);

  __ Bind(intrinsic_slow_path->GetExitLabel());
}

UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
UNIMPLEMENTED_INTRINSIC(X86, DoubleIsInfinite)
UNIMPLEMENTED_INTRINSIC(X86, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, IntegerLowestOneBit)
UNIMPLEMENTED_INTRINSIC(X86, LongLowestOneBit)

// 1.8.
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddLong)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetInt)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetLong)
UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndSetObject)

UNREACHABLE_INTRINSICS(X86)

#undef __

}  // namespace x86
}  // namespace art